Now uses an Excel sheet as a template file and populates a sheet with datapoints. The charts already exist on the template and are populated with the data on the second sheet.
import logging, boto3, pytz, shutil

from openpyxl.utils.datetime import to_excel
import pandas as pd
from datetime import datetime as dt
from datetime import timedelta as td
import datetime as dtf
from tb_rest_client.rest_client_ce import *
from tb_rest_client.rest import ApiException
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from email import encoders
from email.mime.base import MIMEBase


logging.basicConfig(level=logging.DEBUG,
                    format='%(asctime)s - %(levelname)s - %(module)s - %(lineno)d - %(message)s',
                    datefmt='%Y-%m-%d %H:%M:%S')

# ThingsBoard REST API URL
url = "https://www.enxlekkocloud.com"  # "https://hp.henrypump.cloud"

# Default Tenant Administrator credentials
username = "nico.a.melone@gmail.com" #"henry.pump.automation@gmail.com"
password = "9EE#mqb*b6bXV9hJrPYGm&w3q5Y@3acumvvb5isQ" #"Henry Pump @ 2022"
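
# Look up the target customer by name and return one page of its devices.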
def getDevices(rest_client, customers, target_customer, page=0, pageSize=500):
    for c in customers.data:
        if c.name == target_customer:
            cid = c.id.id
            devices = rest_client.get_customer_devices(customer_id=cid, page_size=pageSize, page=page)
            return devices  # .to_dict()

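
# Find a device by name in the device page and return (device, its timeseries keys, error).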
def getDeviceKeys(rest_client, devices, target_device):
    try:
        for d in devices.data:
            if d.name == target_device:
                device = d
                keys = rest_client.get_timeseries_keys_v1(d.id)
                return device, keys, None
        return None, None, "Device Not Found"
    except Exception as e:
        logging.error("Something went wrong in getDeviceKeys")
        logging.error(e)
        return (None, None, e)

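
# Fetch raw timeseries values for the given device and keys between start_ts and end_ts.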
def getTelemetry(rest_client, device, keys, start_ts, end_ts, limit):
    try:
        return rest_client.get_timeseries(entity_id=device.id, keys=keys, start_ts=start_ts, end_ts=end_ts, limit=limit)  # entity_type=entity_type,
    except Exception as e:
        logging.error("Something went wrong in getTelemetry")
        logging.error(e)
        return False

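
# Resolve a time-request dict into (start_ts, end_ts) in epoch milliseconds.
# Supported types: "last" (a relative window ending now), "midnight-midnight"
# (yesterday's full day in the requested timezone), and "range" (explicit timestamps).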
def getTime(timeRequest):
    start_ts, end_ts = 0, 0
    if timeRequest["type"] == "last":
        now = dt.now()
        delta = td(days=timeRequest["days"], seconds=timeRequest["seconds"], microseconds=timeRequest["microseconds"], milliseconds=timeRequest["milliseconds"], minutes=timeRequest["minutes"], hours=timeRequest["hours"], weeks=timeRequest["weeks"])
        start_ts = str(int(dt.timestamp(now - delta) * 1000))
        end_ts = str(int(dt.timestamp(now) * 1000))
    elif timeRequest["type"] == "midnight-midnight":
        timezone = pytz.timezone(timeRequest["timezone"])
        today = dtf.date.today()
        yesterday_midnight = dtf.datetime.combine(today - dtf.timedelta(days=1), dtf.time())
        today_midnight = dtf.datetime.combine(today, dtf.time())
        yesterday_midnight = timezone.localize(yesterday_midnight)
        today_midnight = timezone.localize(today_midnight)
        start_ts = int(yesterday_midnight.timestamp()) * 1000
        end_ts = int(today_midnight.timestamp()) * 1000
    elif timeRequest["type"] == "range":
        start_ts = timeRequest["ts_start"]
        end_ts = timeRequest["ts_end"]
    return (start_ts, end_ts)

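
# Log in to ThingsBoard and collect telemetry for every device under the target customer.
# Returns a dict keyed by device name, or False if the API call fails.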
def getThingsBoardData(url, username, password, targetCustomer, timeRequest):
    # Creating the REST client object with context manager to get auto token refresh
    with RestClientCE(base_url=url) as rest_client:
        try:
            # Auth with credentials
            rest_client.login(username=username, password=password)
            # Get customers > get devices under a target customer > get keys for devices > get data for devices
            customers = rest_client.get_customers(page_size="100", page="0")
            devices = getDevices(rest_client=rest_client, customers=customers, target_customer=targetCustomer)
            telemetry = {}
            for d in devices.data:
                # print(d.name)
                device, keys, err = getDeviceKeys(rest_client=rest_client, devices=devices, target_device=d.name)
                start_ts, end_ts = getTime(timeRequest)
                # print(keys)
                telemetry[d.name] = getTelemetry(rest_client=rest_client, device=device, keys=','.join(keys), start_ts=start_ts, end_ts=end_ts, limit=25000)
            return telemetry
        except ApiException as e:
            logging.error(e)
            return False

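
# Return the length of the longest friendly column label.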
def getMaxWidth():
    label_mapping = {
        "Lit 116b Level": "WASTE TANK 1",
        "Lit 116a Level": "WASTE TANK 2",
        "Fit 100 Flow Rate": "INLET FLOW RATE",
        "Fit 109b Flow Rate": "SALES FLOW RATE",
        "Outlet Turbidity Temp": "OUTLET TURBIDITY TEMP",
        "Outlet Orp Temp": "OUTLET ORP TEMP",
        "Inlet Turbidity Temp": "INLET TURBIDITY TEMP",
        "Inlet Ph Temp": "INLET PH TEMP",
        "Ait 102b H2s": "INLET H₂S",
        "At 109b H2s": "OUTLET H₂S",
        "At 109c Oil In Water": "OUTLET OIL IN WATER",
        "Ait 102a Turbitity": "INLET TURBIDITY",
        "At 109a Turbidity": "OUTLET TURBIDITY",
        "At 109e Orp": "OUTLET ORP"
    }
    width = 0
    for key, value in label_mapping.items():
        if len(value) > width:
            width = len(value)
    return width

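
# Map a raw telemetry key (e.g. "lit_116b_level") to its friendly report column label.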
def formatColumnName(telemetryName):
    name = " ".join([x.capitalize() for x in telemetryName.split("_")])
    label_mapping = {
        "Lit 116b Level": "WASTE TANK 1",
        "Lit 116a Level": "WASTE TANK 2",
        "Fit 100 Flow Rate": "INLET FLOW RATE",
        "Fit 109b Flow Rate": "SALES FLOW RATE",
        "Outlet Turbidity Temp": "OUTLET TURBIDITY TEMP",
        "Outlet Orp Temp": "OUTLET ORP TEMP",
        "Inlet Turbidity Temp": "INLET TURBIDITY TEMP",
        "Inlet Ph Temp": "INLET PH TEMP",
        "Ait 102b H2s": "INLET H₂S",
        "At 109b H2s": "OUTLET H₂S",
        "At 109c Oil In Water": "OUTLET OIL IN WATER",
        "Ait 102a Turbitity": "INLET TURBIDITY",
        "At 109a Turbidity": "OUTLET TURBIDITY",
        "At 109e Orp": "OUTLET ORP"
    }
    # Fall back to the prettified telemetry name so unmapped keys do not become a None column.
    return label_mapping.get(name, name)

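
# Upper-case every word of a telemetry key (intended for chart names).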
def formatChartName(telemetryName):
    return " ".join([x.upper() for x in telemetryName.split("_")])

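
# Build a wide DataFrame indexed by timestamp: one column per telemetry key that is not
# in ignore_keys, with values converted to the requested timezone and forward-filled.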
def getDataFrame(telemetry, ignore_keys, time):
    df = pd.DataFrame()
    # for location in telemetry.keys():
    # Iterate through each datapoint
    for datapoint in telemetry.keys():
        # Convert the datapoint list of dictionaries to a DataFrame
        if datapoint not in ignore_keys:
            temp_df = pd.DataFrame(telemetry[datapoint])
            temp_df['ts'] = pd.to_datetime(temp_df['ts'], unit='ms').dt.tz_localize('UTC').dt.tz_convert(time["timezone"]).dt.tz_localize(None)
            # Set 'ts' as the index
            temp_df.set_index('ts', inplace=True)
            temp_df["value"] = pd.to_numeric(temp_df["value"], errors="coerce")
            # Rename 'value' column to the name of the datapoint
            temp_df.rename(columns={'value': formatColumnName(datapoint)}, inplace=True)

            # Join the temp_df to the main DataFrame
            df = df.join(temp_df, how='outer')
    # ffill() is not in-place; assign the result back so gaps are actually forward-filled
    df = df.ffill()
    # df = df.fillna(method='ffill', limit=2)
    # Rename index to 'Date'
    df.rename_axis('Date', inplace=True)
    return df

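
# Walk up from the bottom of a worksheet to find the last row that contains data.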
def get_last_data_row(ws):
    # Start from the bottom row and work up to find the last row with data
    for row in range(ws.max_row, 0, -1):
        if any(cell.value is not None for cell in ws[row]):
            return row
    return 0  # If no data is found, return 0

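
# Time-request presets. Only the last assignment to `time` takes effect;
# the report below is built for the explicit "range" request.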
time = {
    "type": "last",
    "days": 3,
    "seconds": 0,
    "microseconds": 0,
    "milliseconds": 0,
    "minutes": 0,
    "hours": 0,
    "weeks": 0,
    "timezone": "US/Central"
}
time = {
    "type": "midnight-midnight",
    "timezone": "US/Alaska"
}
time = {
    "type": "range",
    "timezone": "US/Alaska",
    "ts_start": 1728115200000,
    "ts_end": 1728201600000
}

telemetry = getThingsBoardData(url, username, password, "Thunderbird Field Services", time)

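
# Copy the report template, then append to the copy with a pandas ExcelWriter using the
# openpyxl engine in "overlay" mode, so the pre-built report sheet and charts are preserved.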
report_path = f"/Users/nico/Documents/test/Thunderbird_{dt.today().strftime('%Y-%m-%d')}.xlsx"
shutil.copyfile('/Users/nico/Documents/GitHub/ThingsBoard/EKKO Reports/thunderbirdfs-daily-report/ACW Daily Report Template.xlsx', report_path)
writer = pd.ExcelWriter(
    report_path,
    engine="openpyxl",
    datetime_format="yyyy-mm-dd hh:mm:ss",
    date_format="yyyy-mm-dd",
    # engine_kwargs={'options': {'strings_to_numbers': True}},
    mode="a",
    if_sheet_exists="overlay")
reportsheet = writer.book.worksheets[0]

ignore_keys = ['latitude', 'longitude', 'speed', 'a_current', 'b_current', 'c_current', 'scada_stop_cmd', 'pit_100a_pressure', 'pit_101a_pressure', 'pit_101b_pressure', 'pit_101c_pressure', 'fit_101_flow_rate', 'fi_101b_popoff', 'fcv_101a_valve', 'fcv_101b_valve', 'pit_102_pressure', 'pit_102_hi_alm', 'pit_102_hihi_alm', 'pit_102_hi_spt', 'pit_102_hihi_spt', 'p200_hand', 'p200_auto', 'xy_200_run', 'ct_200_run', 'pit_100_pressure', 'm106a_vfd_active', 'm106a_vfd_faulted', 'm106a_vfd_frequency', 'm106a_vfd_start', 'm106a_vfd_stop', 'pit_106a_pressure', 'fit_106a_flow_rate', 'm106b_vfd_active', 'm106b_vfd_faulted', 'm106b_vfd_frequency', 'm106b_vfd_start', 'm106b_vfd_stop', 'pit_106b_pressure', 'fit_106b_flow_rate', 'pit_106c_pressure', 'pit_106d_pressure', 'sdv106_open', 'sdv106_closed', 'bp_3a_auto', 'bp_3a_hand', 'bp_3a_run_cmd', 'bp_3a_run', 'bp_3a_fault', 'bp_3b_auto', 'bp_3b_hand', 'bp_3b_run_cmd', 'bp_3b_run', 'bp_3b_fault', 'pit_107a_pressure', 'fit_107a_flow_rate', 'pit_107b_pressure', 'fcv_001_valve', 'fit_107b_flow_rate', 'pit_107d_pressure', 'fcv_002_valve', 'pit_107c_pressure', 'pit_108a_pressure', 'pit_108b_pressure', 'dpi_108a_pressure', 'pit_108c_pressure', 'pit_108d_pressure', 'pdt_108b_pressure', 'pit_108e_pressure', 'pit_108f_pressure', 'pdt_108c_pressure', 'pit_108_pressure', 'pdt_108a_hi_alm', 'pdt_108a_hihi_alm', 'pdt_108b_hi_alm', 'pdt_108b_hihi_alm', 'pdt_108c_hi_alm', 'pdt_108c_hihi_alm', 'ait_102c_ph', 'ait_102d_oil_in_water', 'fit_102_flow_rate', 'lit_112a_h2o2_level', 'lit_112b_nahso3_level', 'fis_112_h2o2_popoff', 'fit_112a_h2o2_flow_rate', 'fit_112b_nahso3_flow_rate', 'at_109d_o2_in_water', 'fit_100_hi_alm', 'fit_100_hihi_alm', 'fit_100_lo_alm', 'fit_111_flow_rate', 'pit_110_pressure', 'lit_170_level', 'lit_200_level', 'lit_101_level', 'li_103D_level_alm', 'lsh_120_hihi_alm', 'pit_050_pressure', 'pit_065_pressure', 'pdi_065_pressure', 'fit_104_n2_rate', 'p100_auto', 'p100_hand', 'sales_recirculate_sw', 'fit_109a_flow_rate', 'pit_111a_n2', 'pit_111b_n2', 'pit_111c_n2', 'ct_200_current', 'sdv_101a', 'xy_100_run', 'skim_total_barrels', 'dpi_108b_pressure', 'chemical_pump_01_run_status', 'chemical_pump_01_rate_offset', 'spt_pid_h2o2_chemical_rate', 'spt_chemical_manual_rate', 'chemical_pump_auto', 'esd_exists', 'n2_purity', 'n2_outlet_flow_rate', 'n2_outlet_temp', 'n2_inlet_pressure', 'compressor_controller_temp', 'compressor_ambient_temp', 'compressor_outlet_temp', 'compressor_outlet_pressure', 'n2_outlet_pressure', 'fit_109b_water_job', 'fit_109b_water_last_month', 'fit_109b_water_month', 'fit_109b_water_lifetime', 'fit_109b_water_today', 'fit_109b_water_yesterday', 'fit_100_water_job', 'fit_100_water_last_month', 'fit_100_water_month', 'fit_100_water_lifetime', 'fit_100_water_today', 'fit_100_water_yesterday', 'h2o2_chemical_rate', 'rmt_sd_alm', 'pnl_esd_alm', 'pit_111c_hihi_alm', 'pit_111b_hihi_alm', 'pit_111a_hihi_alm', 'pit_110_hihi_alm', 'pit_108g_hihi_alm', 'pit_108c_hihi_alm', 'pit_108b_hihi_alm', 'pit_108a_hihi_alm', 'pit_107b_lolo_alm', 'pit_107a_lolo_alm', 'pit_106b_hihi_alm', 'pit_106a_hihi_alm', 'pit_101b_transmitter_alm', 'pit_101b_hihi_alm', 'pit_101a_transmitter_alm', 'pit_101a_hihi_alm', 'pit_101a_hi_alm', 'pit_100_hihi_alm', 'pit_065_hihi_alm', 'pit_050_hihi_alm', 'pdi_065_lolo_alm', 'pdi_065_lo_alm', 'pdi_065_hihi_alm', 'm106b_vfd_faulted_alm', 'm106a_vfd_faulted_alm', 'lit_200_hihi_alm', 'lit_170_hihi_alm', 'fit_107b_lolo_alm', 'fit_107a_lolo_alm', 'fit_106b_hihi_alm', 'fit_106a_hihi_alm', 'fit_004_hihi_alm', 'bp_3b_run_fail_alm', 
'bp_3a_run_fail_alm', 'ait_114c_hihi_alm', 'ait_114b_hihi_alm', 'ait_114a_hihi_alm', 'ac_volt', 'bc_volt', 'ab_volt', 'psd_alm', 'ait_114a_lolo_alm', 'ait_114a_lo_alm', 'ait_114r_lolo_alm', 'ait_114r_lo_alm', 'ait_114z_lo_alm', 'ait_114z_lolo_alm', 'ait_114x_lo_alm', 'ait_114x_lolo_alm', 'ait_114c_lolo_alm', 'ait_114c_lo_alm', 'ait_114l_lolo_alm', 'ait_114l_lo_alm', 'lit_116b_hihi_alm', 'lit_116b_hi_alm', 'lit_116a_hihi_alm', 'lit_116a_hi_alm']

# Create a sheet for each device
for device in telemetry.keys():
    df = getDataFrame(telemetry[device], ignore_keys, time)

    # Write the DataFrame to its own sheet, keeping the header row and the datetime index.
    df.to_excel(writer, sheet_name=device, startrow=0, header=True, index=True, float_format="%.2f")

    # Get the openpyxl workbook and worksheet objects.
    workbook = writer.book
    worksheet = writer.sheets[device]
    # Apply a datetime number format to the index column.
    for row in worksheet.iter_rows(min_row=2, max_col=1):
        for cell in row:
            cell.number_format = 'yyyy-mm-dd hh:mm:ss'

# Get the data sheet for ACW #1 to access the date range actually available
datasheet = writer.book.worksheets[1]
datetime_min = datasheet["A2"].value
last_data_row = get_last_data_row(datasheet)
datetime_max = datasheet[f"A{last_data_row}"].value
# Convert to Excel serial date numbers
datetime_min = to_excel(datetime_min)
datetime_max = round(to_excel(datetime_max))
# Change the range of the chart
chart = reportsheet._charts[0]
chart.x_axis.scaling.min = datetime_min
chart.x_axis.scaling.max = datetime_max
chart.x_axis.number_format = 'hh:mm'

# int() handles both the string and int timestamps that getTime can return
reportsheet["B4"].value = dt.fromtimestamp(int(getTime(time)[0]) / 1000).strftime('%m/%d/%Y')
reportsheet["B5"] = "Test Well Name"
reportsheet["B6"] = "Test Well Lead"
reportsheet["B7"] = "Test COPA Lead"
reportsheet["B8"] = "Test Job Name"

reportsheet["B11"] = "Test Events or Spills"
reportsheet["B13"] = "Test Issues"

reportsheet["E5"] = "A very large summary test text to put into perspective the amount\n of work that is having to be done to this sheet\n for this to work"

# Close the Pandas Excel writer and output the Excel file.
writer.close()

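
# Email the finished workbook as an attachment via Amazon SES.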
# Create an AWS SES client
ses_client = boto3.client('ses', region_name='us-east-1')

# Create an email message
emails = ["nmelone@henry-pump.com"]
"""emails = [
    "dvaught@thunderbirdfs.com",
    "rkamper@thunderbirdfs.com",
    "john.griffin@acaciaes.com",
    "Joshua.Fine@fineelectricalservices2018.com"
]"""
msg = MIMEMultipart()
msg['Subject'] = "Thunderbird Field Services"
msg['From'] = 'alerts@henry-pump.com'
msg['To'] = ", ".join(emails)

# Add a text body to the message (optional)
body_text = 'Please find the attached spreadsheets.'
msg.attach(MIMEText(body_text, 'plain'))

# Attach the file to the email message
attachment = MIMEBase('application', 'octet-stream')
# Read back the workbook written above (the original read from /tmp, which does not match the path written to)
with open(report_path, "rb") as f:
    attachment.set_payload(f.read())
encoders.encode_base64(attachment)
attachment.add_header('Content-Disposition', 'attachment', filename=f"Thunderbird_{dt.today().strftime('%Y-%m-%d')}.xlsx")
msg.attach(attachment)

# Send the email using AWS SES
response = ses_client.send_raw_email(
    RawMessage={'Data': msg.as_string()}
)