import datetime
import json
import os
import traceback
from zipfile import ZipFile

import firebase_admin
import pandas as pd
import psycopg2
from firebase_admin import credentials
from firebase_admin import db
from google.cloud import storage

storage_client = storage.Client.from_service_account_json("servicekey.json")
bucket_name = 'farmbase-b2f7e.appspot.com'
cred = credentials.Certificate('servicekey.json')
try:
    firebase_admin.initialize_app(cred, {'databaseURL': 'https://farmbase-b2f7e-31c0c.firebaseio.com/'})
except Exception:
    print('firebase app already running')

uid = 'TCXcp5VIsfhHZrh0nm2VsgBtcGy2'
#server2022('snQYQZqQx3SmVbRztmEqYn5Mkcz2', None)
# Shallow fetch: returns only the top-level keys, not the nested field data.
gavl_list = db.reference('PaidMonitoredFields').child('PMF').child(uid).get(False, True)
#gavl_list = db.reference('PaidMonitoredFields').child('PMF').order_by_child('FieldArea').limit_to_first(2).get()

# Establishing the connection
conn = psycopg2.connect(
    database="gavl-postgis",
    user='postgres',
    password='P7^6fL9Q}h2v$UvA',
    host='34.68.167.51',
    port='5432'
)
# Setting autocommit to true so each statement is committed immediately
conn.autocommit = True

# Creating a cursor object using the cursor() method
cursor = conn.cursor()

# Executing a PostgreSQL query using the execute() method
cursor.execute("select version()")

# Fetch a single row using fetchone() method.
data = cursor.fetchone()
print("Connection established to: ", data)


def getVal(key, obj):
    # Return obj[key], or an empty string if the key is missing.
    try:
        val = obj[key]
    except Exception:
        val = ''
    return val


def make_folder_url(blob):
    # Generate a V4 signed download URL for the blob.
    url = blob.generate_signed_url(
        version="v4",
        # This URL is valid for 1500 minutes (25 hours).
        expiration=datetime.timedelta(minutes=1500),
        # Allow GET requests using this URL.
method="GET", credentials = cred ) return url def make_and_upload_zip(uid): try: files = os.listdir((uid + '_ZipKML')) zip_file_name = uid + '_ZipKML/reports.zip' zipObj = ZipFile(zip_file_name,'w') for temp_file in files: zipObj.write((uid + '_ZipKML/' + temp_file)) zipObj.close() blob_name = 'GAVLZip/data.zip' bucket = storage_client.bucket(bucket_name) blob = bucket.blob(blob_name) blob.upload_from_filename(zip_file_name) url = make_folder_url(blob) except: print(traceback.format_exc()) url = "" return url mainObj =[] json_obj = {} json_obj["type"] = "FeatureCollection" json_obj["features"] = [] fieldids = ['1611730411677','1611730551186','1611730664187','1611730834639','1611731010687','1611731098888','1611731265986','1611731498286','1611731614762','1611731704188','1611731797145','1611745120936'] for fieldid in fieldids: #for (fieldid, temp) in gavl_list.items(): fieldobj = db.reference('PaidMonitoredFields').child('PMF').child(uid).child(fieldid).get() try: fieldAddrss = fieldobj["FieldAddress"] if "fined" in fieldAddrss: fieldAddrss = ' ' except: fieldAddrss = ' ' try: fieldDes = fieldobj["FieldDescription"] except: fieldDes = '' try: fullAddress = fieldobj["FullAddress"] except: fullAddress = '' mainAddress = fieldAddrss + fieldDes + fullAddress try: polygonsObj = fieldobj["Polygons"] for(polygonid, polygonObj) in polygonsObj.items(): tempPropertiesObj = {} # my_datetime = datetime.datetime.fromtimestamp(float(getVal("polygonID", polygonObj)) / 1000) # Apply fromtimestamp function # tempPropertiesObj["GeotaggingDate"] = my_datetime # getVal("polygonID", polygonObj) # tempPropertiesObj["AddedBy"] = getVal("AddedByPhoneNumber", polygonObj) # tempPropertiesObj["Area_SqM"] = getVal("Area", polygonObj) # # tempPropertiesObj["Area_Ha"] = round(float(getVal("Area", polygonObj))/10000,2) # tempPropertiesObj["Area_Acres"] = round(float(getVal("Area", polygonObj))/4047,2) # tempPropertiesObj["FarmerCode"] = getVal("Name", polygonObj) # tempPropertiesObj["LandID"] = getVal("PhoneNumber", polygonObj) # tempPropertiesObj["FieldID"] = getVal("fieldID", polygonObj) # tempPropertiesObj["PolygonID"] = getVal("polygonID", polygonObj) # tempPropertiesObj["Address"] = mainAddress pointsObj = getVal("Coordinates", polygonObj) single_geometry_obj = {} single_geometry_obj["type"] = "Feature" single_geometry_obj["geometry"] = {} single_geometry_obj["geometry"]["type"] = "Polygon" single_geometry_obj["properties"] = tempPropertiesObj singleFieldPointsArr = [] try: for (pointkey, pointobj) in pointsObj.items(): pointArr = [pointobj["Longitude"], pointobj["Latitude"]] singleFieldPointsArr.append(pointArr) main_points_arr = [] main_points_arr.append(singleFieldPointsArr) single_geometry_obj["geometry"]["coordinates"] = main_points_arr json_obj["features"].append(single_geometry_obj) mainObj.append(tempPropertiesObj) except: print(traceback.format_exc()) except: print(traceback.format_exc()) #print(json_obj) json_object = json.dumps(json_obj, indent=4) reports_folder = uid + '_ZipML' isdir = os.path.isdir(reports_folder) if isdir != True: os.mkdir(reports_folder) # Writing to sample.json with open((reports_folder +"/gavl_json_obj.json"), "w") as outfile: outfile.write(json_object) try: pd.DataFrame(mainObj).to_excel((reports_folder + '/gavl.xlsx')) except: pd.DataFrame(mainObj).to_csv((reports_folder + '/gavl.csv')) make_and_upload_zip(uid)