from itertools import zip_longest
from tempfile import tempdir
import firebase_admin
from firebase_admin import credentials
from firebase_admin import db
from find_s1_image import find_img_value
import pandas as pd
from sklearn.metrics import classification_report, confusion_matrix, accuracy_score
import statsmodels.api as sm
import seaborn as sns
sns.set()
from zipfile import ZipFile
from sklearn.ensemble import RandomForestRegressor
from sklearn.model_selection import train_test_split
import csv
import time
import json
from find_modis_ndvi import find_modis_ndvi
import numpy as np
from sklearn.cluster import KMeans
from find_study_area_values import find_study_area_values
from find_study_area_values3 import find_study_area_values3
from make_area_estimate_image import make_area_estimate_image
from make_egypt_estimate_image import make_egypt_estimate_image
from sentinelhub import WebFeatureService, BBox, CRS, MimeType, WmsRequest, DataCollection
import traceback
from firebase_admin import firestore
from PIL import Image
from google.cloud import storage
import os
from get_mask import get_mask
import cv2
import scipy.ndimage
from google.oauth2 import service_account
import datetime
from datetime import date

# Cloud Storage client and bucket that hold the generated PDF reports.
storage_client = storage.Client.from_service_account_json("servicekey.json")
bucket_name = 'farmbase-b2f7e.appspot.com'
cred = service_account.Credentials.from_service_account_file('servicekey.json')
# cred = credentials.Certificate('servicekey.json')
bucket = storage_client.bucket(bucket_name)

# Initialize the Firebase app once; a second call raises, so just log and continue.
try:
    firebase_admin.initialize_app(
        credentials.Certificate('servicekey.json'),
        {'databaseURL': 'https://farmbase-b2f7e-31c0c.firebaseio.com/'}
    )
except Exception:
    print('fire running')

db_firestore = firestore.client()


def make_reports_zip():
    """Build and upload a zip of recent field reports for every user that requested one."""
    # storage_client = storage.Client()
    today = date.today()
    d1 = today.strftime('%Y%m%d')
    present_timestamp = round(time.time()) * 1000
    timestamp_threshold = 12 * 60 * 60 * 1000  # 12 hours in milliseconds

    # All users whose ReportZipRequests document has IsReportRequested == 1.
    zip_requests_obj = db_firestore.collection(u'ReportZipRequests').where('IsReportRequested', '==', 1).get()
    for temp_d in zip_requests_obj:
        uid = temp_d.id
        print(temp_d.id)
        print(temp_d.to_dict())
        temp_main_obj = temp_d.to_dict()
        try:
            last_timestamp = temp_main_obj["ZipTimestamp"]
        except Exception:
            last_timestamp = 0
        print([last_timestamp, present_timestamp, (present_timestamp - last_timestamp), timestamp_threshold])

        # Only rebuild the zip if the previous one is older than the threshold (or was never built).
        if last_timestamp == 0 or (present_timestamp - last_timestamp) > timestamp_threshold:
            # Start from an empty working folder for this user's reports.
            reports_folder = uid + '_ZipReports'
            if not os.path.isdir(reports_folder):
                os.mkdir(reports_folder)
            else:
                os.system("rm -rf " + reports_folder)
                os.mkdir(reports_folder)

            # zip_requests_obj = zip_requests_obj.to_dict()
            # print(zip_requests_obj)
            # for (uid, temp) in zip_requests_obj.items():
            fields = db.reference('PaidMonitoredFields').child('PMF').child(uid).get()
            try:
                for (fieldid, fieldobj) in fields.items():
                    senseddays = try_obj(fieldobj, 'SensedDays')
                    # get_languages already splits the comma-separated value into a list (or None).
                    languages = get_languages(fieldobj, 'Language')
                    address = get_field_name(fieldobj)
                    if senseddays is not None:
                        # SensedDays keys are YYYYMMDD dates; keep the last (most recent) one.
                        for (temp_day, temp) in senseddays.items():
                            latest_date = temp_day
                    else:
                        latest_date = '0'
                    print([uid, fieldid, d1, latest_date])
                    # Only include fields whose latest report date is within the last few days
                    # (integer difference of the YYYYMMDD values).
                    if int(d1) - int(latest_date) < 5:
                        get_and_upload_file(uid, fieldid, latest_date, languages, address, storage_client, bucket_name)
            except Exception as e:
                print(e)
            # blob_name = 'ZipReports/' + uid
            # blob = bucket.blob(blob_name)

        if last_timestamp == 0 or (present_timestamp - last_timestamp) > timestamp_threshold:
            url = make_and_upload_zip(uid)
            print(url)
            temp_main_obj["URL"] = url
            temp_main_obj["ZipTimestamp"] = round(time.time()) * 1000
            temp_main_obj["IsReportRequested"] = 0
            db_firestore.collection('ReportZipRequests').document(uid).set(temp_main_obj)
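
# Shape of a ReportZipRequests document as read and written above. The field
# names come from this module; everything else about the document (types,
# who sets IsReportRequested) is an assumption, not confirmed by the source.
#   {
#       "IsReportRequested": 1,         # presumably set by the client to request a fresh zip
#       "ZipTimestamp": 1700000000000,  # ms timestamp of the last generated zip
#       "URL": "https://...",           # signed download URL of reports.zip
#   }
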
def make_and_upload_zip(uid):
    """Zip everything in the user's report folder and upload it to Cloud Storage."""
    files = os.listdir(uid + '_ZipReports')
    zip_file_name = uid + '_ZipReports/reports.zip'
    zipObj = ZipFile(zip_file_name, 'w')
    for temp_file in files:
        zipObj.write(uid + '_ZipReports/' + temp_file)
    zipObj.close()

    blob_name = 'ZipReports/' + uid + '/reports.zip'
    bucket = storage_client.bucket(bucket_name)
    blob = bucket.blob(blob_name)
    blob.upload_from_filename(zip_file_name)
    url = make_folder_url(blob)
    return url


def get_field_name(obj):
    """Return a printable field name, preferring FieldDescription over FieldAddress."""
    try:
        val = obj['FieldDescription']
    except Exception:
        try:
            val = obj['FieldAddress']
        except Exception:
            val = "-"
    return val


def make_folder_url(blob):
    """Create a signed download URL for the uploaded zip."""
    url = blob.generate_signed_url(
        version="v4",
        # This URL is valid for 1500 minutes (25 hours).
        expiration=datetime.timedelta(minutes=1500),
        # Allow GET requests using this URL.
        method="GET",
        credentials=cred,
    )
    return url


def get_and_upload_file(uid, fieldid, latest_date, languages, address, storage_client, bucket_name):
    """Download the latest report PDF for a field, one per language, into the user's zip folder."""
    if languages is not None:
        for lang in languages:
            if lang != "en":
                try:
                    file_name = 'Field_Report_' + address + "_" + lang + '.pdf'
                    blob_name = 'PaidMonitoredFields/' + uid + '/' + fieldid + '/' + latest_date + '/report' + lang + '.pdf'
                    bucket = storage_client.bucket(bucket_name)
                    source_blob = bucket.blob(blob_name)
                    source_blob.download_to_filename(uid + '_ZipReports/' + file_name)
                except Exception as e:
                    print(e)
            else:
                # The English report is stored without a language suffix.
                file_name = 'Field_Report_' + address + '.pdf'
                blob_name = 'PaidMonitoredFields/' + uid + '/' + fieldid + '/' + latest_date + '/report.pdf'
                bucket = storage_client.bucket(bucket_name)
                source_blob = bucket.blob(blob_name)
                try:
                    source_blob.download_to_filename(uid + '_ZipReports/' + file_name)
                    print('downloaded')
                except Exception as e:
                    print(e)


def get_languages(obj, key):
    """Return the field's comma-separated languages as a list, or None if the key is missing."""
    try:
        val = obj[key]
        val = val.split(',')
    except Exception:
        val = None
    return val


def try_obj(obj, key):
    """Return obj[key], or None if the key is missing."""
    try:
        val = obj[key]
    except Exception:
        val = None
    return val
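

# Usage sketch (an assumption, not part of the original module): this script is
# presumably run periodically, e.g. from cron, to service pending zip requests.
# A minimal entry point would simply call make_reports_zip() once per run.
if __name__ == "__main__":
    make_reports_zip()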