Changes to EKKO report

This commit is contained in:
Nico Melone
2024-12-10 17:48:49 -06:00
parent bf9e80393a
commit 3db3225bf9
12 changed files with 3888 additions and 299 deletions

View File

@@ -0,0 +1,285 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import uuid, json, copy"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"device_profile_id = \"a3e85630-b25d-11ef-8d27-31960e941324\"\n",
"base_profile_path = \"/Users/nico/Documents/GitHub/thingsboard_vc/device_profile/\"\n",
"device_profile = {}"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"with open(base_profile_path+device_profile_id+\".json\", \"r\") as f:\n",
" device_profile = json.load(f)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"alarmDatapoints = [\n",
" \"pond_level_input_alm\",\n",
" \"tp_1_charge_pump_fail_to_start_alm\",\n",
" \"tp_1_hi_a_winding_alm\",\n",
" \"tp_1_hi_b_winding_alm\",\n",
" \"tp_1_hi_c_winding_alm\",\n",
" \"tp_1_hi_discharge_alm\",\n",
" \"tp_1_hi_inboard_temp_alm\",\n",
" \"tp_1_hi_outboard_temp_alm\",\n",
" \"tp_1_hi_vibration_alm\",\n",
" \"tp_1_lo_discharge_alm\",\n",
" \"tp_1_lo_oil_alm\",\n",
" \"tp_1_lo_suction_alm\",\n",
" \"tp_1_oil_cooler_failed_to_start_alm\",\n",
" \"tp_2_charge_pump_fail_to_start_alm\",\n",
" \"tp_2_hi_a_winding_alm\",\n",
" \"tp_2_hi_b_winding_alm\",\n",
" \"tp_2_hi_c_winding_alm\",\n",
" \"tp_2_hi_discharge_alm\",\n",
" \"tp_2_hi_inboard_temp_alm\",\n",
" \"tp_2_hi_outboard_temp_alm\",\n",
" \"tp_2_hi_vibration_alm\",\n",
" \"tp_2_lo_discharge_alm\",\n",
" \"tp_2_lo_oil_alm\",\n",
" \"tp_2_lo_suction_alm\",\n",
" \"tp_2_oil_cooler_failed_to_start_alm\",\n",
" \"wtp_1_discharge_alm\",\n",
" \"wtp_1_suction_alm\",\n",
" \"wtp_1_vibration_alm\",\n",
" \"wtp_2_discharge_alm\",\n",
" \"wtp_2_suction_alm\",\n",
" \"wtp_2_vibration_alm\",\n",
" \"ww_1_comms_alm\",\n",
" \"ww_1_control_power_alm\",\n",
" \"ww_1_hi_discharge_alm\",\n",
" \"ww_1_hi_flow_alm\",\n",
" \"ww_1_hoa_in_manual_alm\",\n",
" \"ww_1_lo_discharge_alm\",\n",
" \"ww_1_lo_flow_alm\",\n",
" \"ww_1_lo_pip_alm\",\n",
" \"ww_1_master_comm_alm\",\n",
" \"ww_1_vfd_alm\",\n",
" \"ww_2_comms_alm\",\n",
" \"ww_2_control_power_alm\",\n",
" \"ww_2_hi_discharge_alm\",\n",
" \"ww_2_hi_flow_alm\",\n",
" \"ww_2_hoa_in_manual_alm\",\n",
" \"ww_2_lo_discharge_alm\",\n",
" \"ww_2_lo_flow_alm\",\n",
" \"ww_2_lo_pip_alm\",\n",
" \"ww_2_master_comm_alm\",\n",
" \"ww_2_vfd_alm\",\n",
" \"ww_3_comms_alm\",\n",
" \"ww_3_control_power_alm\",\n",
" \"ww_3_hi_discharge_alm\",\n",
" \"ww_3_hi_flow_alm\",\n",
" \"ww_3_hoa_in_manual_alm\",\n",
" \"ww_3_lo_discharge_alm\",\n",
" \"ww_3_lo_flow_alm\",\n",
" \"ww_3_lo_pip_alm\",\n",
" \"ww_3_master_comm_alm\",\n",
" \"ww_3_vfd_alm\",\n",
" \"ww_4_comms_alm\",\n",
" \"ww_4_control_power_alm\",\n",
" \"ww_4_hi_discharge_alm\",\n",
" \"ww_4_hi_flow_alm\",\n",
" \"ww_4_hoa_in_manual_alm\",\n",
" \"ww_4_lo_discharge_alm\",\n",
" \"ww_4_lo_flow_alm\",\n",
" \"ww_4_lo_pip_alm\",\n",
" \"ww_4_master_comm_alm\",\n",
" \"ww_4_vfd_alm\",\n",
" \"ww_5_comms_alm\",\n",
" \"ww_5_control_power_alm\",\n",
" \"ww_5_hi_discharge_alm\",\n",
" \"ww_5_hi_flow_alm\",\n",
" \"ww_5_hoa_in_manual_alm\",\n",
" \"ww_5_lo_discharge_alm\",\n",
" \"ww_5_lo_flow_alm\",\n",
" \"ww_5_lo_pip_alm\",\n",
" \"ww_5_master_comm_alm\",\n",
" \"ww_5_vfd_alm\",\n",
" \"ww_6_comms_alm\",\n",
" \"ww_6_control_power_alm\",\n",
" \"ww_6_hi_discharge_alm\",\n",
" \"ww_6_hi_flow_alm\",\n",
" \"ww_6_hoa_in_manual_alm\",\n",
" \"ww_6_lo_discharge_alm\",\n",
" \"ww_6_lo_flow_alm\",\n",
" \"ww_6_lo_pip_alm\",\n",
" \"ww_6_master_comm_alm\",\n",
" \"ww_6_vfd_alm\"\n",
"]"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"template = {\n",
" \"alarmType\" : \"Discharge Hi Alarm\",\n",
" \"clearRule\" : {\n",
" \"alarmDetails\" : None,\n",
" \"condition\" : {\n",
" \"condition\" : [ {\n",
" \"key\" : {\n",
" \"key\" : \"discharge_hi_alm\",\n",
" \"type\" : \"TIME_SERIES\"\n",
" },\n",
" \"predicate\" : {\n",
" \"type\" : \"BOOLEAN\",\n",
" \"operation\" : \"EQUAL\",\n",
" \"value\" : {\n",
" \"defaultValue\" : False,\n",
" \"dynamicValue\" : None,\n",
" \"userValue\" : None\n",
" }\n",
" },\n",
" \"value\" : None,\n",
" \"valueType\" : \"BOOLEAN\"\n",
" } ],\n",
" \"spec\" : {\n",
" \"type\" : \"DURATION\",\n",
" \"predicate\" : {\n",
" \"defaultValue\" : 30,\n",
" \"dynamicValue\" : None,\n",
" \"userValue\" : None\n",
" },\n",
" \"unit\" : \"MINUTES\"\n",
" }\n",
" },\n",
" \"dashboardId\" : None,\n",
" \"schedule\" : None\n",
" },\n",
" \"createRules\" : {\n",
" \"CRITICAL\" : {\n",
" \"alarmDetails\" : None,\n",
" \"condition\" : {\n",
" \"condition\" : [ {\n",
" \"key\" : {\n",
" \"key\" : \"discharge_hi_alm\",\n",
" \"type\" : \"TIME_SERIES\"\n",
" },\n",
" \"predicate\" : {\n",
" \"type\" : \"BOOLEAN\",\n",
" \"operation\" : \"EQUAL\",\n",
" \"value\" : {\n",
" \"defaultValue\" : True,\n",
" \"dynamicValue\" : None,\n",
" \"userValue\" : None\n",
" }\n",
" },\n",
" \"value\" : None,\n",
" \"valueType\" : \"BOOLEAN\"\n",
" } ],\n",
" \"spec\" : {\n",
" \"type\" : \"SIMPLE\"\n",
" }\n",
" },\n",
" \"dashboardId\" : None,\n",
" \"schedule\" : None\n",
" }\n",
" },\n",
" \"id\" : \"8c858b00-485e-42db-7b69-6a3f9565d823\",\n",
" \"propagate\" : False,\n",
" \"propagateRelationTypes\" : None,\n",
" \"propagateToOwner\" : False,\n",
" \"propagateToOwnerHierarchy\" : False,\n",
" \"propagateToTenant\" : False\n",
" }"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"def formatName(name):\n",
" nameMap = {\n",
" \"vfd\": \"VFD\",\n",
" \"ww\": \"Water Well\",\n",
" \"tp\": \"Transfer Pump\",\n",
" \"alm\": \"Alarm\"\n",
" }\n",
"\n",
" partsUF = name.split(\"_\")\n",
" partsF = []\n",
" for part in partsUF:\n",
" partsF.append(nameMap.get(part, part.capitalize()))\n",
" return \" \".join(partsF)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"alarms = []\n",
"for alarm in alarmDatapoints:\n",
" id = str(uuid.uuid4())\n",
" alarmType = formatName(alarm)\n",
" alarmTemplate = copy.deepcopy(template)\n",
" alarmTemplate[\"alarmType\"] = alarmType\n",
" alarmTemplate[\"id\"] = id\n",
" alarmTemplate[\"clearRule\"][\"condition\"][\"condition\"][0][\"key\"][\"key\"] = alarm\n",
" alarmTemplate[\"createRules\"][\"CRITICAL\"][\"condition\"][\"condition\"][0][\"key\"][\"key\"] = alarm\n",
" alarms.append(alarmTemplate)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"device_profile[\"entity\"][\"profileData\"][\"alarms\"] = alarms\n",
"with open(base_profile_path + device_profile_id + \".json\", \"w\") as f:\n",
" json.dump(device_profile, f, indent=4)"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "base",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.9.12"
}
},
"nbformat": 4,
"nbformat_minor": 2
}
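
For a quick check of the notebook's output, a minimal sketch, separate from the committed code, that re-reads the rewritten profile and verifies the generated rules are internally consistent. The path, profile id, and the "entity" -> "profileData" -> "alarms" layout are the ones assumed by the notebook above:

import json

# Paths reused from the notebook above; the profile layout follows the template it edits.
base_profile_path = "/Users/nico/Documents/GitHub/thingsboard_vc/device_profile/"
device_profile_id = "a3e85630-b25d-11ef-8d27-31960e941324"

with open(base_profile_path + device_profile_id + ".json") as f:
    profile = json.load(f)

rules = profile["entity"]["profileData"]["alarms"]
ids = [r["id"] for r in rules]
# Each rule should clear and create on the same telemetry key.
mismatched = [
    r["alarmType"] for r in rules
    if r["clearRule"]["condition"]["condition"][0]["key"]["key"]
    != r["createRules"]["CRITICAL"]["condition"]["condition"][0]["key"]["key"]
]
print(f"{len(rules)} alarm rules, {len(set(ids))} unique ids")
print("clear/create key mismatches:", mismatched or "none")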

View File

@@ -1,4 +1,4 @@
import logging, json, xlsxwriter, boto3, pytz, math, os
import logging, json, xlsxwriter, boto3, pytz, math, os, shutil
import pandas as pd
from datetime import datetime as dt
from datetime import timedelta as td
@@ -35,8 +35,8 @@ def getDeviceKeys(rest_client, devices,target_device):
return device, keys, None
return None, None,"Device Not Found"
except Exception as e:
print("Something went wrong in getDeviceKeys")
print(e)
logging.error("Something went wrong in getDeviceKeys")
logging.error(e)
return (None, None, e)
@@ -44,8 +44,8 @@ def getTelemetry(rest_client, device, keys, start_ts, end_ts,limit):
try:
return rest_client.get_timeseries(entity_id=device.id, keys=keys, start_ts=start_ts, end_ts=end_ts, limit=limit) #entity_type=entity_type,
except Exception as e:
print("Something went wrong in getTelemetry")
print(e)
logging.error("Something went wrong in getTelemetry")
logging.error(e)
return False
@@ -167,6 +167,7 @@ def lambda_handler(event, context):
datetime_format="yyyy-mm-dd hh:mm:ss",
date_format="yyyy-mm-dd",engine_kwargs={'options': {'strings_to_numbers': True}})
chartsheet = writer.book.add_worksheet("Charts")
reportsheet = writer.book.add_worksheet("Report")
ignore_keys = ['latitude', 'longitude', 'speed', 'a_current', 'b_current', 'c_current', 'scada_stop_cmd', 'pit_100a_pressure', 'pit_101a_pressure', 'pit_101b_pressure', 'pit_101c_pressure', 'fit_101_flow_rate', 'fi_101b_popoff', 'fcv_101a_valve', 'fcv_101b_valve', 'pit_102_pressure', 'pit_102_hi_alm', 'pit_102_hihi_alm', 'pit_102_hi_spt', 'pit_102_hihi_spt', 'p200_hand', 'p200_auto', 'xy_200_run', 'ct_200_run', 'pit_100_pressure', 'm106a_vfd_active', 'm106a_vfd_faulted', 'm106a_vfd_frequency', 'm106a_vfd_start', 'm106a_vfd_stop', 'pit_106a_pressure', 'fit_106a_flow_rate', 'm106b_vfd_active', 'm106b_vfd_faulted', 'm106b_vfd_frequency', 'm106b_vfd_start', 'm106b_vfd_stop', 'pit_106b_pressure', 'fit_106b_flow_rate', 'pit_106c_pressure', 'pit_106d_pressure', 'sdv106_open', 'sdv106_closed', 'bp_3a_auto', 'bp_3a_hand', 'bp_3a_run_cmd', 'bp_3a_run', 'bp_3a_fault', 'bp_3b_auto', 'bp_3b_hand', 'bp_3b_run_cmd', 'bp_3b_run', 'bp_3b_fault', 'pit_107a_pressure', 'fit_107a_flow_rate', 'pit_107b_pressure', 'fcv_001_valve', 'fit_107b_flow_rate', 'pit_107d_pressure', 'fcv_002_valve', 'pit_107c_pressure', 'pit_108a_pressure', 'pit_108b_pressure', 'dpi_108a_pressure', 'pit_108c_pressure', 'pit_108d_pressure', 'pdt_108b_pressure', 'pit_108e_pressure', 'pit_108f_pressure', 'pdt_108c_pressure', 'pit_108_pressure', 'pdt_108a_hi_alm', 'pdt_108a_hihi_alm', 'pdt_108b_hi_alm', 'pdt_108b_hihi_alm', 'pdt_108c_hi_alm', 'pdt_108c_hihi_alm', 'ait_102c_ph', 'ait_102d_oil_in_water', 'fit_102_flow_rate', 'lit_112a_h2o2_level', 'lit_112b_nahso3_level', 'fis_112_h2o2_popoff', 'fit_112a_h2o2_flow_rate', 'fit_112b_nahso3_flow_rate', 'at_109d_o2_in_water', 'fit_100_hi_alm', 'fit_100_hihi_alm', 'fit_100_lo_alm', 'fit_111_flow_rate', 'pit_110_pressure', 'lit_170_level', 'lit_200_level', 'lit_101_level', 'li_103D_level_alm', 'lsh_120_hihi_alm', 'pit_050_pressure', 'pit_065_pressure', 'pdi_065_pressure', 'fit_104_n2_rate', 'p100_auto', 'p100_hand', 'sales_recirculate_sw', 'fit_109a_flow_rate', 'pit_111a_n2', 'pit_111b_n2', 'pit_111c_n2', 'ct_200_current', 'sdv_101a', 'xy_100_run', 'skim_total_barrels', 'dpi_108b_pressure', 'chemical_pump_01_run_status', 'chemical_pump_01_rate_offset', 'spt_pid_h2o2_chemical_rate', 'spt_chemical_manual_rate', 'chemical_pump_auto', 'esd_exists', 'n2_purity', 'n2_outlet_flow_rate', 'n2_outlet_temp', 'n2_inlet_pressure', 'compressor_controller_temp', 'compressor_ambient_temp', 'compressor_outlet_temp', 'compressor_outlet_pressure', 'n2_outlet_pressure', 'fit_109b_water_job', 'fit_109b_water_last_month', 'fit_109b_water_month', 'fit_109b_water_lifetime', 'fit_109b_water_today', 'fit_109b_water_yesterday', 'fit_100_water_job', 'fit_100_water_last_month', 'fit_100_water_month', 'fit_100_water_lifetime', 'fit_100_water_today', 'fit_100_water_yesterday', 'h2o2_chemical_rate', 'rmt_sd_alm', 'pnl_esd_alm', 'pit_111c_hihi_alm', 'pit_111b_hihi_alm', 'pit_111a_hihi_alm', 'pit_110_hihi_alm', 'pit_108g_hihi_alm', 'pit_108c_hihi_alm', 'pit_108b_hihi_alm', 'pit_108a_hihi_alm', 'pit_107b_lolo_alm', 'pit_107a_lolo_alm', 'pit_106b_hihi_alm', 'pit_106a_hihi_alm', 'pit_101b_transmitter_alm', 'pit_101b_hihi_alm', 'pit_101a_transmitter_alm', 'pit_101a_hihi_alm', 'pit_101a_hi_alm', 'pit_100_hihi_alm', 'pit_065_hihi_alm', 'pit_050_hihi_alm', 'pdi_065_lolo_alm', 'pdi_065_lo_alm', 'pdi_065_hihi_alm', 'm106b_vfd_faulted_alm', 'm106a_vfd_faulted_alm', 'lit_200_hihi_alm', 'lit_170_hihi_alm', 'fit_107b_lolo_alm', 'fit_107a_lolo_alm', 'fit_106b_hihi_alm', 'fit_106a_hihi_alm', 'fit_004_hihi_alm', 'bp_3b_run_fail_alm', 
'bp_3a_run_fail_alm', 'ait_114c_hihi_alm', 'ait_114b_hihi_alm', 'ait_114a_hihi_alm', 'ac_volt', 'bc_volt', 'ab_volt', 'psd_alm', 'ait_114a_lolo_alm', 'ait_114a_lo_alm', 'ait_114r_lolo_alm', 'ait_114r_lo_alm', 'ait_114z_lo_alm', 'ait_114z_lolo_alm', 'ait_114x_lo_alm', 'ait_114x_lolo_alm', 'ait_114c_lolo_alm', 'ait_114c_lo_alm', 'ait_114l_lolo_alm', 'ait_114l_lo_alm', 'lit_116b_hihi_alm', 'lit_116b_hi_alm', 'lit_116a_hihi_alm', 'lit_116a_hi_alm']
unit_mapping = {
"level": "ft",
@@ -196,6 +197,7 @@ def lambda_handler(event, context):
"AT 109A TURBIDITY": "OUTLET TURBIDITY",
"AT 109E ORP": "OUTLET ORP"
}
#Create a Sheet for each Device
for device in telemetry.keys():
df = getDataFrame(telemetry[device], ignore_keys, time)
@@ -279,6 +281,9 @@ def lambda_handler(event, context):
chartsheet.insert_chart('A' + str(position), chart, {'x_scale': 3, 'y_scale': 2})
position += 30
title_format = writer.book.add_format({'bold': True, 'font_color': "#5f5f5f", 'font_name': "Aptos Narrow", 'font_size': 36})
reportsheet.write(0,0, "ACW Report", title_format)
# Close the Pandas Excel writer and output the Excel file.
writer.close()

View File

@@ -5,7 +5,7 @@
],
"customers": {
"ec691940-52e2-11ec-a919-556e8dbef35c": {
"name": "CrownQuest",
"name": "OxyRock",
"deviceTypes": [
{
"deviceType": "rigpump",
@@ -17,18 +17,27 @@
{
"deviceType": "cqwatertanks",
"dataPoints": [
"fm_1_flow_rate"
"fm_1_flow_rate",
"tank_1_level"
]
}
]
},
"81083430-6988-11ec-a919-556e8dbef35c": {
"name": "Henry-Petroleum",
"deviceTypes": [
},
{
"deviceType": "abbflow",
"deviceType": "piflow",
"dataPoints": [
"accumulated_volume"
"avgFrequency30Days",
"percentRunTime30Days",
"yesterday_totalizer_1",
"totalizer_1"
]
},
{
"deviceType": "advvfdipp",
"dataPoints": [
"flowtotalyesterday",
"fluidlevel",
"energytotalyesterday",
"avgFrequency30Days",
"percentRunTime30Days"
]
}
]
@@ -36,7 +45,7 @@
},
"filterDevicesIn": [],
"filterDevicesOut": [],
"name": "CrownQuest-Daily-Report"
"name": "OxyRock-Daily-Report"
},
{
"emails": [

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@@ -0,0 +1,2 @@
DOCKER_HOST=unix:///Users/nico/.docker/run/docker.sock sam build --use-container
DOCKER_HOST=unix:///Users/nico/.docker/run/docker.sock sam local invoke

View File

@@ -5,38 +5,70 @@
],
"customers": {
"ec691940-52e2-11ec-a919-556e8dbef35c": {
"name": "CrownQuest",
"name": "OxyRock",
"deviceTypes": [
{
"deviceType": "rigpump",
"dataPoints": [
"vfd_current",
"vfd_frequency"
]
],
"labels":{
"vfd_current": "VFD Current",
"vfd_frequency": "VFD Frequency"
}
},
{
"deviceType": "cqwatertanks",
"dataPoints": [
"fm_1_flow_rate"
]
}
]
},
"81083430-6988-11ec-a919-556e8dbef35c": {
"name": "Henry-Petroleum",
"deviceTypes": [
"fm_1_flow_rate",
"tank_1_level",
"tank_2_level"
],
"labels":{
"fm_1_flow_rate": "Flow Meter 1 Flow Rate",
"tank_1_level": "Tank 1 Level",
"tank_2_level": "Tank 2 Level"
}
},
{
"deviceType": "abbflow",
"deviceType": "piflow",
"dataPoints": [
"accumulated_volume"
]
"avgFrequency30Days",
"percentRunTime30Days",
"yesterday_totalizer_1",
"totalizer_1"
],
"labels":{
"avgFrequency30Days": "Avg. Frequency 30 Days",
"percentRunTime30Days": "Run Time 30 Days %",
"yesterday_totalizer_1": "Yesterday's Volume",
"totalizer_1": "Totalizer 1"
}
},
{
"deviceType": "advvfdipp",
"dataPoints": [
"flowtotalyesterday",
"fluidlevel",
"energytotalyesterday",
"avgFrequency30Days",
"percentRunTime30Days"
],
"labels":{
"flowtotalyesterday": "Yesterday's Volume",
"fluidlevel": "Fluid Level",
"energytotalyesterday": "Yesterday's Energy",
"avgFrequency30Days": "Avg. Frequency 30 Days",
"percentRunTime30Days": "Run Time 30 Days %"
}
}
]
}
},
"filterDevicesIn": [],
"filterDevicesOut": [],
"name": "CrownQuest-Daily-Report"
"name": "OxyRock-Daily-Report"
},
{
"emails": [
@@ -50,7 +82,10 @@
"deviceType": "abbflow",
"dataPoints": [
"accumulated_volume"
]
],
"labels":{
"accumulated_volume": "Accumulated Volume"
}
}
]
}

View File

@@ -1,79 +1,137 @@
from tb_rest_client.rest_client_pe import *
from tb_rest_client.rest import ApiException
import json, xlsxwriter, boto3, os
import json, xlsxwriter, boto3, os, time
from threading import Lock
from datetime import datetime as dt
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from email import encoders
from email.mime.base import MIMEBase
# Define a rate limiter class
class RateLimiter:
def __init__(self, max_calls, period):
self.max_calls = max_calls
self.period = period
self.call_times = []
self.lock = Lock()
def acquire(self):
with self.lock:
current_time = time.time()
# Remove expired calls
self.call_times = [t for t in self.call_times if t > current_time - self.period]
if len(self.call_times) >= self.max_calls:
# Wait for the oldest call to expire
time_to_wait = self.period - (current_time - self.call_times[0])
time.sleep(time_to_wait)
# Register the current call
self.call_times.append(time.time())
# Initialize a rate limiter
rate_limiter = RateLimiter(max_calls=10, period=1) # Adjust `max_calls` and `period` as needed
def sort_dict_keys(d):
"""Sorts the keys of all nested dictionaries in a given dictionary.
Args:
d: The input dictionary.
Returns:
A new dictionary with sorted keys at each level.
"""
sorted_d = {}
for k, v in d.items():
if isinstance(v, dict):
sorted_d[k] = sort_dict_keys(v)
else:
sorted_d[k] = v
return dict(sorted(sorted_d.items()))
def lambda_handler(event, context):
#Creating Rest Client for ThingsBoard
# Creating Rest Client for ThingsBoard
with RestClientPE(base_url="https://hp.henrypump.cloud") as rest_client:
try:
rest_client.login(username=os.environ["username"], password=os.environ["password"])
#Loading Config from file
# Loading Config from file
with open("./config.json") as f:
config = json.load(f)
reportData = {}
reportToList = {}
#Loop through each item in config, each item represents a report
# Loop through each item in config, each item represents a report
for report in config:
reportToList[report["name"]] = report["emails"]
#Each customer becomes its own xlsx file later
for customer in report["customers"].keys():
#Get all the devices for a given customer
devices = rest_client.get_customer_devices(customer_id=customer, page=0, page_size=100)
#Filter devices to the desired devices
# Apply rate limiting for API calls
rate_limiter.acquire()
devices = rest_client.get_customer_devices(customer_id=customer, page=0, page_size=1000)
if report["filterDevicesIn"]:
devices.data = [device for device in devices.data if device.id.id in report["filterDevicesIn"]]
if report["filterDevicesOut"]:
devices.data = [device for device in devices.data if device.id.id not in report["filterDevicesOut"]]
#Create the report in reportData if needed
if not reportData.get(report["name"], None):
reportData[report["name"]] = {}
#Go through all the devices and add them and their desired data to reportData
for device in devices.data:
name = device.name
keys = rest_client.get_timeseries_keys_v1(device.id)
#Filter keys to desired keys
for deviceType in report["customers"][customer]["deviceTypes"]:
if device.type == deviceType["deviceType"]:
rate_limiter.acquire()
keys = rest_client.get_timeseries_keys_v1(device.id)
keys = list(filter(lambda x: x in deviceType["dataPoints"], keys))
#Create customer if needed
#Check for report customer
if not reportData[report["name"]].get(report["customers"][customer]["name"], None):
reportData[report["name"]][report["customers"][customer]["name"]] = {}
#Check to make sure the deviceType is desired in the report for the given device
#Check for device type in config
if device.type in list(map(lambda x: x["deviceType"], report["customers"][customer]["deviceTypes"])):
#Create deviceType if needed
#Check if deviceType in report
if not reportData[report["name"]][report["customers"][customer]["name"]].get(device.type, None):
reportData[report["name"]][report["customers"][customer]["name"]][device.type] = {}
reportData[report["name"]][report["customers"][customer]["name"]][device.type][device.name] = rest_client.get_latest_timeseries(entity_id=device.id , keys=",".join(keys))
if keys:
rate_limiter.acquire()
deviceData = rest_client.get_latest_timeseries(entity_id=device.id, keys=",".join(keys))
for x in report["customers"][customer]["deviceTypes"]:
if x["deviceType"] == device.type:
labels = x["labels"]
labelled_data = {}
for k,v in labels.items():
labelled_data[v] = {}
for k,v in deviceData.items():
labelled_data[labels[k]] = v
reportData[report["name"]][report["customers"][customer]["name"]][device.type][device.name] = labelled_data
else:
reportData[report["name"]][report["customers"][customer]["name"]][device.type][device.name] = {}
#Sort Data
reportDataSorted = sort_dict_keys(reportData)
#print(json.dumps(reportDataSorted,indent=4))
except ApiException as e:
print(e)
print(f"API Exception: {e}")
except Exception as e:
print(f"Other Exception in getting data:\n{e}")
# Create an AWS SES client
ses_client = boto3.client('ses', region_name='us-east-1')
s3 = boto3.resource('s3')
BUCKET_NAME = "thingsboard-email-reports"
# Create a workbook for each report
for report_name, report_data in reportData.items():
for report_name, report_data in reportDataSorted.items():
#Will generate an email for this report below
spreadsheets = []
# Create a worksheet for each company
for company_name, company_data in report_data.items():
workbook = xlsxwriter.Workbook(f"/tmp/{report_name}-{company_name}-{dt.today().strftime('%Y-%m-%d')}.xlsx")
workbook = xlsxwriter.Workbook(f"/tmp/{report_name}-{company_name}-{dt.today().strftime('%Y-%m-%d')}.xlsx",{'strings_to_numbers': True})
bold = workbook.add_format({'bold': True})
# Create a sheet for each device type
for device_type, device_data in company_data.items():
worksheet = workbook.add_worksheet(device_type)
# Set the header row with device types
device_types = list(device_data.keys())
worksheet.write_column(1, 0, device_types,bold)
# Set the header column with device names
device_names = list(device_data.keys())
worksheet.write_column(1, 0, device_names,bold)
# Write the data to the sheet
for i, (telemetry_name, telemetry_data) in enumerate(device_data.items()):
# Set the header row with telemetry names
@@ -82,9 +140,12 @@ def lambda_handler(event, context):
for j, (data_name, data) in enumerate(telemetry_data.items()):
values = [d["value"] for d in data]
worksheet.write_row(i + 1, j+ 1, values)
worksheet.autofit()
worksheet.autofit()
workbook.close()
spreadsheets.append(workbook)
# Store the generated report in S3.
s3.Object(BUCKET_NAME, f'{report_name}-{company_name}-{dt.today().strftime('%Y-%m-%d')}.xlsx').put(Body=open(f"/tmp/{report_name}-{company_name}-{dt.today().strftime('%Y-%m-%d')}.xlsx", 'rb'))
# Create an email message
msg = MIMEMultipart()
msg['Subject'] = report_name
@@ -104,7 +165,6 @@ def lambda_handler(event, context):
attachment.add_header('Content-Disposition', 'attachment', filename=spreadsheet.filename[5:])
msg.attach(attachment)
# Send the email using AWS SES
response = ses_client.send_raw_email(
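
For reference, a standalone sketch of the sliding-window limiter pattern introduced above, separate from the committed handler. The class body mirrors the one in the diff; the max_calls/period values and the demo loop are illustrative only:

import time
from threading import Lock

class RateLimiter:
    """Allow at most max_calls acquisitions within any `period` seconds."""
    def __init__(self, max_calls, period):
        self.max_calls = max_calls
        self.period = period
        self.call_times = []
        self.lock = Lock()

    def acquire(self):
        with self.lock:
            now = time.time()
            # Drop calls that have aged out of the window
            self.call_times = [t for t in self.call_times if t > now - self.period]
            if len(self.call_times) >= self.max_calls:
                # Sleep until the oldest call leaves the window
                time.sleep(self.period - (now - self.call_times[0]))
            self.call_times.append(time.time())

# Illustrative values only (the lambda uses max_calls=10, period=1)
limiter = RateLimiter(max_calls=5, period=1)
start = time.time()
for i in range(12):
    limiter.acquire()
    # At most max_calls acquisitions complete within any one-second window
    print(f"call {i:2d} at +{time.time() - start:4.2f}s")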

View File

@@ -19,12 +19,22 @@ Resources:
Variables:
username: henry.pump.automation@gmail.com
password: Henry Pump @ 2022
TBREPORTBUCKET_BUCKET_NAME: !Ref TBReportBucket
TBREPORTBUCKET_BUCKET_ARN: !GetAtt TBReportBucket.Arn
Architectures:
- arm64
CodeUri: tbreport
Runtime: python3.12
Handler: tbreport.lambda_handler
Policies: AmazonSESFullAccess
Policies:
- AmazonSESFullAccess
- Statement:
- Effect: Allow
Action:
- s3:PutObject
Resource:
- !Sub arn:${AWS::Partition}:s3:::${TBReportBucket}
- !Sub arn:${AWS::Partition}:s3:::${TBReportBucket}/*
Layers:
- !Ref TBReportLayer
TBReportLayer:
@@ -65,4 +75,33 @@ Resources:
Statement:
- Effect: Allow
Action: lambda:InvokeFunction
Resource: !GetAtt TBReport.Arn
Resource: !GetAtt TBReport.Arn
TBReportBucket:
Type: AWS::S3::Bucket
Properties:
BucketName: !Sub thingsboard-email-reports
BucketEncryption:
ServerSideEncryptionConfiguration:
- ServerSideEncryptionByDefault:
SSEAlgorithm: aws:kms
KMSMasterKeyID: alias/aws/s3
PublicAccessBlockConfiguration:
IgnorePublicAcls: true
RestrictPublicBuckets: true
TBReportBucketBucketPolicy:
Type: AWS::S3::BucketPolicy
Properties:
Bucket: !Ref TBReportBucket
PolicyDocument:
Id: RequireEncryptionInTransit
Version: '2012-10-17'
Statement:
- Principal: '*'
Action: '*'
Effect: Deny
Resource:
- !GetAtt TBReportBucket.Arn
- !Sub ${TBReportBucket.Arn}/*
Condition:
Bool:
aws:SecureTransport: 'false'
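
Since the template now exposes TBREPORTBUCKET_BUCKET_NAME to the function's environment, the hard-coded bucket name in the handler could be read from there instead. A small sketch under that assumption, separate from the committed code; the upload_report helper name is hypothetical:

import os
import boto3

# Bucket name from the environment variable the template now defines,
# falling back to the current hard-coded value.
BUCKET_NAME = os.environ.get("TBREPORTBUCKET_BUCKET_NAME", "thingsboard-email-reports")

s3 = boto3.resource("s3")

def upload_report(local_path, key):
    # Same upload call shape as the lambda; the bucket policy added above denies
    # non-TLS requests, which boto3 satisfies by default (HTTPS endpoints).
    with open(local_path, "rb") as f:
        s3.Object(BUCKET_NAME, key).put(Body=f)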

View File

@@ -0,0 +1,438 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [],
"source": [
"import logging, json, boto3, pytz, math, os, shutil\n",
"from openpyxl.utils import get_column_letter\n",
"from openpyxl.utils.datetime import CALENDAR_WINDOWS_1900, to_excel\n",
"import pandas as pd\n",
"from datetime import datetime as dt\n",
"from datetime import timedelta as td\n",
"import datetime as dtf\n",
"from tb_rest_client.rest_client_ce import *\n",
"from tb_rest_client.rest import ApiException\n",
"from email.mime.multipart import MIMEMultipart\n",
"from email.mime.text import MIMEText\n",
"from email import encoders\n",
"from email.mime.base import MIMEBase"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [],
"source": [
"logging.basicConfig(level=logging.DEBUG,\n",
" format='%(asctime)s - %(levelname)s - %(module)s - %(lineno)d - %(message)s',\n",
" datefmt='%Y-%m-%d %H:%M:%S')"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [],
"source": [
"# ThingsBoard REST API URL\n",
"url = \"https://www.enxlekkocloud.com\" #\"https://hp.henrypump.cloud\"\n",
"# Default Tenant Administrator credentials\n",
"username = \"nico.a.melone@gmail.com\" #\"henry.pump.automation@gmail.com\"\n",
"password = \"9EE#mqb*b6bXV9hJrPYGm&w3q5Y@3acumvvb5isQ\" #\"Henry Pump @ 2022\""
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {},
"outputs": [],
"source": [
"def getDevices(rest_client, customers,target_customer, page=0, pageSize=500):\n",
" for c in customers.data:\n",
" if c.name == target_customer:\n",
" cid = c.id.id\n",
" devices = rest_client.get_customer_devices(customer_id=cid, page_size=pageSize, page=page)\n",
" return devices #.to_dict()"
]
},
{
"cell_type": "code",
"execution_count": 5,
"metadata": {},
"outputs": [],
"source": [
"def getDeviceKeys(rest_client, devices,target_device):\n",
" try:\n",
" for d in devices.data:\n",
" if d.name == target_device:\n",
" device = d\n",
" keys = rest_client.get_timeseries_keys_v1(d.id)\n",
" return device, keys, None\n",
" return None, None,\"Device Not Found\"\n",
" except Exception as e:\n",
" logging.error(\"Something went wrong in getDeviceKeys\")\n",
" logging.error(e)\n",
" return (None, None, e)"
]
},
{
"cell_type": "code",
"execution_count": 6,
"metadata": {},
"outputs": [],
"source": [
"def getTelemetry(rest_client, device, keys, start_ts, end_ts,limit):\n",
" try:\n",
" return rest_client.get_timeseries(entity_id=device.id, keys=keys, start_ts=start_ts, end_ts=end_ts, limit=limit) #entity_type=entity_type, \n",
" except Exception as e:\n",
" logging.error(\"Something went wrong in getTelemetry\")\n",
" logging.error(e)\n",
" return False\n",
" "
]
},
{
"cell_type": "code",
"execution_count": 7,
"metadata": {},
"outputs": [],
"source": [
"def getTime(timeRequest):\n",
" start_ts, end_ts = 0,0\n",
" if timeRequest[\"type\"] == \"last\":\n",
" now = dt.now()\n",
" delta = td(days=timeRequest[\"days\"], seconds=timeRequest[\"seconds\"], microseconds=timeRequest[\"microseconds\"], milliseconds=timeRequest[\"milliseconds\"], minutes=timeRequest[\"minutes\"], hours=timeRequest[\"hours\"], weeks=timeRequest[\"weeks\"])\n",
" start_ts = str(int(dt.timestamp(now - delta) * 1000))\n",
" end_ts = str(int(dt.timestamp(now) * 1000))\n",
" elif timeRequest[\"type\"] == \"midnight-midnight\":\n",
" timezone = pytz.timezone(timeRequest[\"timezone\"])\n",
" today = dtf.date.today()\n",
" yesterday_midnight = dtf.datetime.combine(today - dtf.timedelta(days=1), dtf.time())\n",
" today_midnight = dtf.datetime.combine(today, dtf.time())\n",
" yesterday_midnight = timezone.localize(yesterday_midnight)\n",
" today_midnight = timezone.localize(today_midnight)\n",
" start_ts = int(yesterday_midnight.timestamp()) * 1000\n",
" end_ts = int(today_midnight.timestamp()) * 1000\n",
" elif timeRequest[\"type\"] == \"range\":\n",
" start_ts = timeRequest[\"ts_start\"]\n",
" end_ts = timeRequest[\"ts_end\"]\n",
" return (start_ts, end_ts)"
]
},
{
"cell_type": "code",
"execution_count": 8,
"metadata": {},
"outputs": [],
"source": [
"def getThingsBoardData(url, username, password, targetCustomer, timeRequest):\n",
" # Creating the REST client object with context manager to get auto token refresh\n",
" with RestClientCE(base_url=url) as rest_client:\n",
" try:\n",
" # Auth with credentials\n",
" rest_client.login(username=username, password=password)\n",
" # Get customers > get devices under a target customer > get keys for devices > get data for devices\n",
" customers = rest_client.get_customers(page_size=\"100\", page=\"0\")\n",
" devices = getDevices(rest_client=rest_client, customers=customers, target_customer=targetCustomer)\n",
" telemetry = {}\n",
" for d in devices.data:\n",
" #print(d.name)\n",
" device, keys, err = getDeviceKeys(rest_client=rest_client, devices=devices, target_device=d.name)\n",
" start_ts, end_ts = getTime(timeRequest)\n",
" #print(keys)\n",
" telemetry[d.name] = getTelemetry(rest_client=rest_client, device=device, keys=','.join(keys), start_ts=start_ts, end_ts=end_ts, limit=25000)\n",
" return telemetry\n",
" except ApiException as e:\n",
" logging.error(e)\n",
" return False\n"
]
},
{
"cell_type": "code",
"execution_count": 9,
"metadata": {},
"outputs": [],
"source": [
"def getMaxWidth():\n",
" label_mapping = {\n",
" \"Lit 116b Level\": \"WASTE TANK 1\",\n",
" \"Lit 116a Level\": \"WASTE TANK 2\",\n",
" \"Fit 100 Flow Rate\": \"INLET FLOW RATE\",\n",
" \"Fit 109b Flow Rate\": \"SALES FLOW RATE\",\n",
" \"Outlet Turbidity Temp\": \"OUTLET TURBIDITY TEMP\",\n",
" \"Outlet Orp Temp\": \"OUTLET ORP TEMP\",\n",
" \"Inlet Turbidity Temp\": \"INLET TURBIDITY TEMP\",\n",
" \"Inlet Ph Temp\": \"INLET PH TEMP\",\n",
" \"Ait 102b H2s\": \"INLET H₂S\",\n",
" \"At 109b H2s\": \"OUTLET H₂S\",\n",
" \"At 109c Oil In Water\": \"OUTLET OIL IN WATER\",\n",
" \"Ait 102a Turbitity\": \"INLET TURBIDITY\",\n",
" \"At 109a Turbidity\": \"OUTLET TURBIDITY\",\n",
" \"At 109e Orp\": \"OUTLET ORP\"\n",
" }\n",
" width = 0\n",
" for key,value in label_mapping.items():\n",
" if(len(value) > width):\n",
" width = len(value)\n",
"\n",
" return width"
]
},
{
"cell_type": "code",
"execution_count": 10,
"metadata": {},
"outputs": [],
"source": [
"def formatColumnName(telemetryName):\n",
" name = \" \".join([x.capitalize() for x in telemetryName.split(\"_\")])\n",
" label_mapping = {\n",
" \"Lit 116b Level\": \"WASTE TANK 1\",\n",
" \"Lit 116a Level\": \"WASTE TANK 2\",\n",
" \"Fit 100 Flow Rate\": \"INLET FLOW RATE\",\n",
" \"Fit 109b Flow Rate\": \"SALES FLOW RATE\",\n",
" \"Outlet Turbidity Temp\": \"OUTLET TURBIDITY TEMP\",\n",
" \"Outlet Orp Temp\": \"OUTLET ORP TEMP\",\n",
" \"Inlet Turbidity Temp\": \"INLET TURBIDITY TEMP\",\n",
" \"Inlet Ph Temp\": \"INLET PH TEMP\",\n",
" \"Ait 102b H2s\": \"INLET H₂S\",\n",
" \"At 109b H2s\": \"OUTLET H₂S\",\n",
" \"At 109c Oil In Water\": \"OUTLET OIL IN WATER\",\n",
" \"Ait 102a Turbitity\": \"INLET TURBIDITY\",\n",
" \"At 109a Turbidity\": \"OUTLET TURBIDITY\",\n",
" \"At 109e Orp\": \"OUTLET ORP\"\n",
" }\n",
" return label_mapping.get(name)"
]
},
{
"cell_type": "code",
"execution_count": 11,
"metadata": {},
"outputs": [],
"source": [
"def formatChartName(telemetryName):\n",
" return \" \".join([x.upper() for x in telemetryName.split(\"_\")])"
]
},
{
"cell_type": "code",
"execution_count": 12,
"metadata": {},
"outputs": [],
"source": [
"def getDataFrame(telemetry, ignore_keys, time): \n",
" df = pd.DataFrame()\n",
" #for location in telemetry.keys():\n",
" # Iterate through each datapoint within each location\n",
" for datapoint in telemetry.keys():\n",
" # Convert the datapoint list of dictionaries to a DataFrame\n",
" if datapoint not in ignore_keys:\n",
" temp_df = pd.DataFrame(telemetry[datapoint])\n",
" temp_df['ts'] = pd.to_datetime(temp_df['ts'], unit='ms').dt.tz_localize('UTC').dt.tz_convert(time[\"timezone\"]).dt.tz_localize(None)\n",
" # Set 'ts' as the index\n",
" temp_df.set_index('ts', inplace=True)\n",
" temp_df[\"value\"] = pd.to_numeric(temp_df[\"value\"], errors=\"coerce\")\n",
" # Rename 'value' column to the name of the datapoint\n",
" temp_df.rename(columns={'value': formatColumnName(datapoint)}, inplace=True)\n",
" \n",
" # Join the temp_df to the main DataFrame\n",
" df = df.join(temp_df, how='outer')\n",
" df.ffill()\n",
" #df = df.fillna(method='ffill', limit=2)\n",
" # Rename index to 'Date'\n",
" df.rename_axis('Date', inplace=True)\n",
" return df"
]
},
{
"cell_type": "code",
"execution_count": 13,
"metadata": {},
"outputs": [],
"source": [
"def get_last_data_row(ws):\n",
" # Start from the bottom row and work up to find the last row with data\n",
" for row in range(ws.max_row, 0, -1):\n",
" if any(cell.value is not None for cell in ws[row]):\n",
" return row\n",
" return 0 # If no data is found, return 0"
]
},
{
"cell_type": "code",
"execution_count": 14,
"metadata": {},
"outputs": [],
"source": [
"time = {\n",
" \"type\": \"last\",\n",
" \"days\":3,\n",
" \"seconds\":0,\n",
" \"microseconds\":0,\n",
" \"milliseconds\":0,\n",
" \"minutes\":0,\n",
" \"hours\":0,\n",
" \"weeks\":0,\n",
" \"timezone\": \"US/Central\"\n",
" }\n",
"time = {\n",
" \"type\": \"midnight-midnight\",\n",
" \"timezone\": \"US/Alaska\" \n",
"}\n",
"time = {\n",
" \"type\": \"range\",\n",
" \"timezone\": \"US/Alaska\" ,\n",
" \"ts_start\": 1728115200000,\n",
" \"ts_end\": 1728201600000\n",
"}\n",
"telemetry = getThingsBoardData(url, username, password, \"Thunderbird Field Services\", time)"
]
},
{
"cell_type": "code",
"execution_count": 20,
"metadata": {},
"outputs": [],
"source": [
"# Create a Pandas Excel writer using XlsxWriter as the engine.\n",
"shutil.copyfile('/Users/nico/Documents/GitHub/ThingsBoard/EKKO Reports/thunderbirdfs-daily-report/ACW Daily Report Template.xlsx', f\"/Users/nico/Documents/test/Thunderbird_{dt.today().strftime('%Y-%m-%d')}.xlsx\")\n",
"writer = pd.ExcelWriter(\n",
" f\"/Users/nico/Documents/test/Thunderbird_{dt.today().strftime('%Y-%m-%d')}.xlsx\", \n",
" engine=\"openpyxl\",\n",
" datetime_format=\"yyyy-mm-dd hh:mm:ss\",\n",
" date_format=\"yyyy-mm-dd\",\n",
" #engine_kwargs={'options': {'strings_to_numbers': True}},\n",
" mode=\"a\",\n",
" if_sheet_exists=\"overlay\")\n",
"reportsheet = writer.book.worksheets[0]\n",
"\n",
"ignore_keys = ['latitude', 'longitude', 'speed', 'a_current', 'b_current', 'c_current', 'scada_stop_cmd', 'pit_100a_pressure', 'pit_101a_pressure', 'pit_101b_pressure', 'pit_101c_pressure', 'fit_101_flow_rate', 'fi_101b_popoff', 'fcv_101a_valve', 'fcv_101b_valve', 'pit_102_pressure', 'pit_102_hi_alm', 'pit_102_hihi_alm', 'pit_102_hi_spt', 'pit_102_hihi_spt', 'p200_hand', 'p200_auto', 'xy_200_run', 'ct_200_run', 'pit_100_pressure', 'm106a_vfd_active', 'm106a_vfd_faulted', 'm106a_vfd_frequency', 'm106a_vfd_start', 'm106a_vfd_stop', 'pit_106a_pressure', 'fit_106a_flow_rate', 'm106b_vfd_active', 'm106b_vfd_faulted', 'm106b_vfd_frequency', 'm106b_vfd_start', 'm106b_vfd_stop', 'pit_106b_pressure', 'fit_106b_flow_rate', 'pit_106c_pressure', 'pit_106d_pressure', 'sdv106_open', 'sdv106_closed', 'bp_3a_auto', 'bp_3a_hand', 'bp_3a_run_cmd', 'bp_3a_run', 'bp_3a_fault', 'bp_3b_auto', 'bp_3b_hand', 'bp_3b_run_cmd', 'bp_3b_run', 'bp_3b_fault', 'pit_107a_pressure', 'fit_107a_flow_rate', 'pit_107b_pressure', 'fcv_001_valve', 'fit_107b_flow_rate', 'pit_107d_pressure', 'fcv_002_valve', 'pit_107c_pressure', 'pit_108a_pressure', 'pit_108b_pressure', 'dpi_108a_pressure', 'pit_108c_pressure', 'pit_108d_pressure', 'pdt_108b_pressure', 'pit_108e_pressure', 'pit_108f_pressure', 'pdt_108c_pressure', 'pit_108_pressure', 'pdt_108a_hi_alm', 'pdt_108a_hihi_alm', 'pdt_108b_hi_alm', 'pdt_108b_hihi_alm', 'pdt_108c_hi_alm', 'pdt_108c_hihi_alm', 'ait_102c_ph', 'ait_102d_oil_in_water', 'fit_102_flow_rate', 'lit_112a_h2o2_level', 'lit_112b_nahso3_level', 'fis_112_h2o2_popoff', 'fit_112a_h2o2_flow_rate', 'fit_112b_nahso3_flow_rate', 'at_109d_o2_in_water', 'fit_100_hi_alm', 'fit_100_hihi_alm', 'fit_100_lo_alm', 'fit_111_flow_rate', 'pit_110_pressure', 'lit_170_level', 'lit_200_level', 'lit_101_level', 'li_103D_level_alm', 'lsh_120_hihi_alm', 'pit_050_pressure', 'pit_065_pressure', 'pdi_065_pressure', 'fit_104_n2_rate', 'p100_auto', 'p100_hand', 'sales_recirculate_sw', 'fit_109a_flow_rate', 'pit_111a_n2', 'pit_111b_n2', 'pit_111c_n2', 'ct_200_current', 'sdv_101a', 'xy_100_run', 'skim_total_barrels', 'dpi_108b_pressure', 'chemical_pump_01_run_status', 'chemical_pump_01_rate_offset', 'spt_pid_h2o2_chemical_rate', 'spt_chemical_manual_rate', 'chemical_pump_auto', 'esd_exists', 'n2_purity', 'n2_outlet_flow_rate', 'n2_outlet_temp', 'n2_inlet_pressure', 'compressor_controller_temp', 'compressor_ambient_temp', 'compressor_outlet_temp', 'compressor_outlet_pressure', 'n2_outlet_pressure', 'fit_109b_water_job', 'fit_109b_water_last_month', 'fit_109b_water_month', 'fit_109b_water_lifetime', 'fit_109b_water_today', 'fit_109b_water_yesterday', 'fit_100_water_job', 'fit_100_water_last_month', 'fit_100_water_month', 'fit_100_water_lifetime', 'fit_100_water_today', 'fit_100_water_yesterday', 'h2o2_chemical_rate', 'rmt_sd_alm', 'pnl_esd_alm', 'pit_111c_hihi_alm', 'pit_111b_hihi_alm', 'pit_111a_hihi_alm', 'pit_110_hihi_alm', 'pit_108g_hihi_alm', 'pit_108c_hihi_alm', 'pit_108b_hihi_alm', 'pit_108a_hihi_alm', 'pit_107b_lolo_alm', 'pit_107a_lolo_alm', 'pit_106b_hihi_alm', 'pit_106a_hihi_alm', 'pit_101b_transmitter_alm', 'pit_101b_hihi_alm', 'pit_101a_transmitter_alm', 'pit_101a_hihi_alm', 'pit_101a_hi_alm', 'pit_100_hihi_alm', 'pit_065_hihi_alm', 'pit_050_hihi_alm', 'pdi_065_lolo_alm', 'pdi_065_lo_alm', 'pdi_065_hihi_alm', 'm106b_vfd_faulted_alm', 'm106a_vfd_faulted_alm', 'lit_200_hihi_alm', 'lit_170_hihi_alm', 'fit_107b_lolo_alm', 'fit_107a_lolo_alm', 'fit_106b_hihi_alm', 'fit_106a_hihi_alm', 'fit_004_hihi_alm', 'bp_3b_run_fail_alm', 
'bp_3a_run_fail_alm', 'ait_114c_hihi_alm', 'ait_114b_hihi_alm', 'ait_114a_hihi_alm', 'ac_volt', 'bc_volt', 'ab_volt', 'psd_alm', 'ait_114a_lolo_alm', 'ait_114a_lo_alm', 'ait_114r_lolo_alm', 'ait_114r_lo_alm', 'ait_114z_lo_alm', 'ait_114z_lolo_alm', 'ait_114x_lo_alm', 'ait_114x_lolo_alm', 'ait_114c_lolo_alm', 'ait_114c_lo_alm', 'ait_114l_lolo_alm', 'ait_114l_lo_alm', 'lit_116b_hihi_alm', 'lit_116b_hi_alm', 'lit_116a_hihi_alm', 'lit_116a_hi_alm']\n",
"\n",
"#Create a Sheet for each Device\n",
"for device in telemetry.keys():\n",
" df = getDataFrame(telemetry[device], ignore_keys, time)\n",
" \n",
" # Write the dataframe data to XlsxWriter. Turn off the default header and\n",
" # index and skip one row to allow us to insert a user defined header.\n",
" df.to_excel(writer, sheet_name=device, startrow=0, header=True, index=True, float_format=\"%.2f\")\n",
"\n",
" # Get the xlsxwriter workbook and worksheet objects.\n",
" workbook = writer.book\n",
" worksheet = writer.sheets[device]\n",
" for row in worksheet.iter_rows(min_row=2, max_col=1):\n",
" for cell in row:\n",
" cell.number_format = 'yyyy-mm-dd hh:mm:ss'\n",
"\n",
"#Getting the data sheet for ACW #1 to access date range actually available\n",
"datasheet = writer.book.worksheets[1]\n",
"datetime_min = datasheet[\"A2\"].value\n",
"last_data_row = get_last_data_row(datasheet)\n",
"datetime_max = datasheet[f\"A{last_data_row}\"].value\n",
"#Convert to excel number\n",
"datetime_min = to_excel(datetime_min)\n",
"datetime_max = round(to_excel(datetime_max))\n",
"#Change the range of the chart\n",
"chart = reportsheet._charts[0]\n",
"chart.x_axis.scaling.min = datetime_min\n",
"chart.x_axis.scaling.max = datetime_max\n",
"chart.x_axis.number_format = 'hh:mm'\n",
"reportsheet[\"B9\"] = dt.fromtimestamp(getTime(time)[0]/1000).strftime('%m/%d/%Y')\n",
"reportsheet[\"B10\"] = \"Test Well Name\"\n",
"reportsheet[\"B11\"] = \"Test Well Lead\"\n",
"reportsheet[\"B12\"] = \"Test COPA Lead\"\n",
"reportsheet[\"B13\"] = \"Test Job Name\"\n",
"\n",
"reportsheet[\"B16\"]= \"Test Events or Spills\"\n",
"reportsheet[\"B18\"] = \"Test Issues\"\n",
"\n",
"reportsheet[\"E10\"] = \"A very large summary test text to put into perspective the amount\\n of work that is having to be done to this sheet\\n for this to work\"\n",
"# Close the Pandas Excel writer and output the Excel file.\n",
"writer.close()\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"df"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Create an AWS SES client\n",
"ses_client = boto3.client('ses', region_name='us-east-1')"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Create an email message\n",
"emails = [\"nmelone@henry-pump.com\"]\n",
"\"\"\"emails = [\n",
" \"dvaught@thunderbirdfs.com\", \n",
" \"rkamper@thunderbirdfs.com\", \n",
" \"john.griffin@acaciaes.com\", \n",
" \"Joshua.Fine@fineelectricalservices2018.com\"\n",
"]\"\"\"\n",
"msg = MIMEMultipart()\n",
"msg['Subject'] = \"Thunderbird Field Services\"\n",
"msg['From'] = 'alerts@henry-pump.com'\n",
"msg['To'] = \", \".join(emails)\n",
"\n",
"# Add a text body to the message (optional)\n",
"body_text = 'Please find the attached spreadsheets.'\n",
"msg.attach(MIMEText(body_text, 'plain'))\n",
"\n",
"\n",
"# Attach the file to the email message\n",
"attachment = MIMEBase('application', 'octet-stream')\n",
"attachment.set_payload(open(f\"/tmp/Thunderbird_{dt.today().strftime('%Y-%m-%d')}.xlsx\", \"rb\").read())\n",
"encoders.encode_base64(attachment)\n",
"attachment.add_header('Content-Disposition', 'attachment', filename=f\"Thunderbird_{dt.today().strftime('%Y-%m-%d')}.xlsx\")\n",
"msg.attach(attachment)\n",
"\n",
"# Send the email using AWS SES\n",
"response = ses_client.send_raw_email(\n",
" \n",
" RawMessage={'Data': msg.as_string()}\n",
")"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "tbreport",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.12.4"
},
"orig_nbformat": 4
},
"nbformat": 4,
"nbformat_minor": 2
}
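
The second notebook's core trick, reduced to a minimal sketch separate from the committed notebook: copy a pre-built .xlsx template, overlay data sheets with pandas/openpyxl, then re-scale the template chart's x-axis to the data's date range. The file names and sample frame are placeholders, and the template is assumed to contain a chart on its first sheet:

import shutil
import pandas as pd
from openpyxl.utils.datetime import to_excel

shutil.copyfile("report_template.xlsx", "report_out.xlsx")  # placeholder file names

# Placeholder data indexed by timestamp, standing in for device telemetry.
df = pd.DataFrame(
    {"value": [1.0, 2.5, 2.1]},
    index=pd.to_datetime(["2024-10-05 00:00", "2024-10-05 12:00", "2024-10-06 00:00"]),
).rename_axis("Date")

with pd.ExcelWriter("report_out.xlsx", engine="openpyxl", mode="a",
                    if_sheet_exists="overlay") as writer:
    df.to_excel(writer, sheet_name="Data")
    report = writer.book.worksheets[0]   # template sheet holding the chart
    chart = report._charts[0]            # first embedded chart, as in the notebook
    # Convert the date range to Excel serial numbers and pin the axis to it.
    chart.x_axis.scaling.min = to_excel(df.index.min().to_pydatetime())
    chart.x_axis.scaling.max = to_excel(df.index.max().to_pydatetime())
    chart.x_axis.number_format = "hh:mm"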