"""One-off export script for a single FarmBase user.

Walks ``PaidMonitoredFields/PMF/<uid>`` in the Firebase realtime database,
collects every polygon object into ``main_final_obj``, counts polygons that
have more than 30 sensed days, then dumps the polygons to chunked JSON files
(``gavl_polygons_obj_<n>.json``, up to 5000 polygons per chunk) and writes two
CSV summaries (``missing_polygons.txt`` and ``gavl_new.csv``).
"""

print("entered")

# --- imports (deduplicated; several are unused in this script but kept so
# behaviour of any downstream `import *` / side-effect expectations is
# unchanged) ---
import base64
import csv
import datetime
import json
import math
import os
import threading
import time
import traceback
from datetime import date
from io import BytesIO
from socket import *  # NOTE(review): wildcard import; nothing below appears to use it — confirm and remove
from statistics import median

import fiona
import firebase_admin
import numpy as np
import pandas as pd
import requests
import utm
import xlrd
import xlsxwriter
from fastkml import kml
from firebase_admin import credentials, db, firestore, messaging
from geopy.geocoders import Nominatim
from google.cloud import storage
from google.oauth2 import service_account
from numba import jit, cuda
from oauth2client.service_account import ServiceAccountCredentials
from oct2py import octave
from sentinelhub import (
    WebFeatureService,
    BBox,
    CRS,
    MimeType,
    WmsRequest,
    DataCollection,
)

from gen_report_new2 import gen_report_new
from get_polygon_mask import get_polygon_mask
from make_on_the_go_farm_image import make_on_the_go_farm_image

# --- Google Cloud Storage / Firebase setup ---
storage_client = storage.Client.from_service_account_json("servicekey.json")
bucket_name = "farmbase-b2f7e.appspot.com"
cred = service_account.Credentials.from_service_account_file("servicekey.json")
# cred = credentials.Certificate('servicekey.json')
bucket = storage_client.bucket(bucket_name)

try:
    firebase_admin.initialize_app(
        credentials.Certificate("servicekey.json"),
        {"databaseURL": "https://farmbase-b2f7e-31c0c.firebaseio.com/"},
    )
except Exception:  # app already initialised (e.g. module re-imported in the same process)
    print("fire running")

print("step 1")
db_firestore = firestore.client()

# Sentinel Hub credentials stored in the realtime database.
sentinelSettings = db.reference("SentinelSettings").get()
clientID = sentinelSettings["ClientID"]
clientSecret = sentinelSettings["ClientSecret"]
wmsID = sentinelSettings["WMSID"]
rviID = sentinelSettings["RVIID"]
demID = sentinelSettings["DEMID"]

images_arr = ["TCI", "NDVI", "NDWI", "NDRE", "RECL", "RVI", "RSM"]
day_gap = 5
today = date.today()
ndvi_val, ndwi_val = 0, 0
imageDate = today.strftime("%Y%m%d")
print("step 2")
kkk = 0

# Hard-coded target user; a per-uid scratch directory is created for it.
uid = "TCXcp5VIsfhHZrh0nm2VsgBtcGy2"
if not os.path.exists(uid):
    os.makedirs(uid)

all_polygons_arr = []
n_tmux = 28
print("step 4")
all_polygons_arr = []
missing_polygons_arr = []
main_final_obj = {}
field_count = 0

# Shallow fetch (get(False, True)) returns only the field ids, not the payloads.
temp_list = db.reference("PaidMonitoredFields").child("PMF").child(uid).get(False, True)
for fieldid, tmp in temp_list.items():
    # Shallow fetch of the polygon ids under this field.
    polygons_obj = (
        db.reference("PaidMonitoredFields")
        .child("PMF")
        .child(uid)
        .child(fieldid)
        .child("Polygons")
        .get(False, True)
    )
    try:
        for polygonid, temp_p in polygons_obj.items():
            # Full fetch of one polygon object; accumulated keyed by polygon id.
            polygon_obj = (
                db.reference("PaidMonitoredFields")
                .child("PMF")
                .child(uid)
                .child(fieldid)
                .child("Polygons")
                .child(polygonid)
                .get()
            )
            main_final_obj[polygonid] = polygon_obj
            try:
                sensed_days_obj = polygon_obj["SensedDays"]
                # Count polygons that have been sensed on more than 30 days.
                # NOTE(review): reconstructed from collapsed source — confirm the
                # original intent was one increment per qualifying polygon.
                if len(sensed_days_obj) > 30:
                    field_count = field_count + 1
            except Exception:
                # Polygon without a SensedDays node (or other shape mismatch):
                # log and keep going — this is a best-effort sweep.
                print(traceback.format_exc())
    except Exception:
        # Field without a Polygons node: log and continue with the next field.
        print(traceback.format_exc())

print(field_count)

# NOTE(review): sleeps for 24 hours before writing any output — looks like a
# leftover throttle/debug pause; confirm whether this is intentional.
time.sleep(86400)

# --- dump the collected polygons to chunked JSON files (<= 5000 per chunk) ---
chunk_index = 1
temp_obj = {}
polygon_num = 0
for polygonid, polygon_obj in main_final_obj.items():
    temp_obj[polygonid] = polygon_obj
    # BUG FIX: the counter was previously incremented twice per item (once at
    # the top of the loop and again in an `else` branch), which halved the
    # effective chunk size to ~2500.
    polygon_num = polygon_num + 1
    if polygon_num > 5000:
        json_object = json.dumps(temp_obj, indent=4)
        with open("gavl_polygons_obj_" + str(chunk_index) + ".json", "w") as outfile:
            outfile.write(json_object)
        temp_obj = {}
        chunk_index = chunk_index + 1
        polygon_num = 0

# BUG FIX: the remainder dump previously incremented the index *before*
# writing, so with fewer than 5000 polygons the only file produced was
# `..._2.json` and `..._1.json` never existed (and an index was skipped after
# every full chunk). The final partial chunk now uses the current index.
json_object = json.dumps(temp_obj, indent=4)
with open("gavl_polygons_obj_" + str(chunk_index) + ".json", "w") as outfile:
    outfile.write(json_object)

# NOTE(review): the code that populated missing_polygons_arr and
# all_polygons_arr is disabled, so both CSVs currently contain headers only.
# newline="" is required by the csv module to avoid blank rows on Windows.
with open("missing_polygons.txt", "w", newline="") as f:
    writer = csv.writer(f)
    writer.writerow(["Field ID", "Polygon ID"])
    writer.writerows(missing_polygons_arr)

with open("gavl_new.csv", "w", newline="") as f:
    writer = csv.writer(f)
    writer.writerow(
        [
            "Farmer Code",
            "Land ID",
            "Satellite Visit Date",
            "NDVI",
            "NDWI",
            "NDRE",
            "RECL",
            "RVI",
            "RSM",
            "Field ID",
            "Polygon ID",
        ]
    )
    writer.writerows(all_polygons_arr)