import os
import time
import traceback
from datetime import datetime as dt

import pandas as pd
import firebase_admin
from firebase_admin import credentials, db
from google.cloud import storage

# Cloud Storage client and Firebase app, both authenticated with the same service key
storage_client = storage.Client.from_service_account_json("servicekey.json")
bucket_name = 'farmbase-b2f7e.appspot.com'
cred = credentials.Certificate('servicekey.json')
try:
    firebase_admin.initialize_app(cred, {'databaseURL': 'https://farmbase-b2f7e-31c0c.firebaseio.com/'})
except Exception:
    print('Firebase app already initialized')


def json_to_excel(data, uuid, field_id, lat, long, excel_file_path):
    """Flatten a field's Health data (per-date vegetation indices) into an Excel sheet."""
    temp_obj = {'uuid': uuid, 'field_id': field_id, 'lat': lat, 'long': long}

    # Create a DataFrame indexed by the dates present in the NDVI series
    columns = ['uuid', 'field_id', 'lat', 'long', 'ndvi', 'evi', 'savi', 'ndre', 'ndmi', 'soc']
    dates = list(data['ndvi'].keys())
    formatted_dates = [dt.strptime(date, '%Y%m%d').strftime('%d-%m-%Y') for date in dates]
    df = pd.DataFrame(index=formatted_dates, columns=columns)

    # Populate the DataFrame with the values
    for column in columns:
        for date in dates:
            formatted_date = dt.strptime(date, '%Y%m%d').strftime('%d-%m-%Y')
            if column in ('uuid', 'field_id', 'lat', 'long'):
                # Field metadata is repeated on every row
                df.at[formatted_date, column] = temp_obj[column]
            elif column == 'soc':
                # SOC proxy: sum of the five indices rescaled by 500, capped at 0.3
                sum_avg = (float(data['ndvi'].get(date, 0)) + float(data['evi'].get(date, 0))
                           + float(data['savi'].get(date, 0)) + float(data['ndre'].get(date, 0))
                           + float(data['ndmi'].get(date, 0)))
                df.at[formatted_date, column] = min(0.3, round(sum_avg / 500, 3))
            else:
                # Indices are stored on a 0-100 scale; convert to 0-1
                df.at[formatted_date, column] = round(float(data[column].get(date, 0)) / 100, 3)

    # Write the DataFrame to an Excel file
    df.to_excel(excel_file_path, sheet_name='Sheet1')


uid = 'ipRHhCOFIDV2pxgg7Nfz1ufZBmV2'
gavl_list = db.reference('PaidMonitoredFields').child('PMF').child(uid).get(False, False)  # not used below

filename = "UUID_FieldID_coordinates.xlsx"
df3 = pd.read_excel(filename)
total_done = 0

# Make sure the output directory exists before writing any workbook
os.makedirs('ifpri_satellite', exist_ok=True)

for index, row in df3.iterrows():
    field_id = str(row["Field ID"])
    uuid = row["UUID"]
    lat = row["Latitude"]
    long = row["Longitude"]
    print(uuid, field_id)

    # Fetch the per-date vegetation-index history for this field
    health_obj = db.reference('PaidMonitoredFields').child('PMF').child(uid).child(field_id).child('Health').get()
    if health_obj is not None:
        try:
            json_to_excel(health_obj, uuid, field_id, lat, long,
                          'ifpri_satellite/' + str(uuid) + '_' + str(field_id) + '.xlsx')
            total_done = total_done + 1
            print(total_done)
        except Exception:
            print(traceback.format_exc())
    else:
        print('no data', uuid, field_id)

print(total_done)
time.sleep(100000)
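
# For reference, json_to_excel assumes the Firebase 'Health' node stores each vegetation
# index as a map of YYYYMMDD date keys to values on a 0-100 scale. The shape sketched
# below is inferred from how the function reads the data, not from the database itself;
# the real node may carry extra keys, and the values shown are purely illustrative.
#
# example_health_obj = {
#     'ndvi': {'20230105': '72', '20230121': '68'},
#     'evi':  {'20230105': '55', '20230121': '51'},
#     'savi': {'20230105': '47', '20230121': '44'},
#     'ndre': {'20230105': '38', '20230121': '36'},
#     'ndmi': {'20230105': '41', '20230121': '40'},
# }
# json_to_excel(example_health_obj, 'example-uuid', 'example-field', 12.97, 77.59,
#               'ifpri_satellite/example-uuid_example-field.xlsx')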