{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [],
"source": [
"import logging, json, boto3, pytz, math, os, shutil\n",
"from openpyxl.utils import get_column_letter\n",
"from openpyxl.utils.datetime import CALENDAR_WINDOWS_1900, to_excel\n",
"import pandas as pd\n",
"from datetime import datetime as dt\n",
"from datetime import timedelta as td\n",
"import datetime as dtf\n",
"from tb_rest_client.rest_client_ce import *\n",
"from tb_rest_client.rest import ApiException\n",
"from email.mime.multipart import MIMEMultipart\n",
"from email.mime.text import MIMEText\n",
"from email import encoders\n",
"from email.mime.base import MIMEBase"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [],
"source": [
"logging.basicConfig(level=logging.DEBUG,\n",
"                    format='%(asctime)s - %(levelname)s - %(module)s - %(lineno)d - %(message)s',\n",
"                    datefmt='%Y-%m-%d %H:%M:%S')"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [],
"source": [
"# ThingsBoard REST API URL\n",
"url = \"https://www.enxlekkocloud.com\" #\"https://hp.henrypump.cloud\"\n",
"# Default Tenant Administrator credentials\n",
"username = \"nico.a.melone@gmail.com\" #\"henry.pump.automation@gmail.com\"\n",
"password = \"9EE#mqb*b6bXV9hJrPYGm&w3q5Y@3acumvvb5isQ\" #\"Henry Pump @ 2022\""
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {},
"outputs": [],
"source": [
"def getDevices(rest_client, customers,target_customer, page=0, pageSize=500):\n",
"    for c in customers.data:\n",
"        if c.name == target_customer:\n",
"            cid = c.id.id\n",
"            devices = rest_client.get_customer_devices(customer_id=cid, page_size=pageSize, page=page)\n",
"            return devices #.to_dict()"
]
},
{
"cell_type": "code",
"execution_count": 5,
"metadata": {},
"outputs": [],
"source": [
"def getDeviceKeys(rest_client, devices,target_device):\n",
"    try:\n",
"        for d in devices.data:\n",
"            if d.name == target_device:\n",
"                device = d\n",
"                keys = rest_client.get_timeseries_keys_v1(d.id)\n",
"                return device, keys, None\n",
"        return None, None,\"Device Not Found\"\n",
"    except Exception as e:\n",
"        logging.error(\"Something went wrong in getDeviceKeys\")\n",
"        logging.error(e)\n",
"        return (None, None, e)"
]
},
{
"cell_type": "code",
"execution_count": 6,
"metadata": {},
"outputs": [],
"source": [
"def getTelemetry(rest_client, device, keys, start_ts, end_ts,limit):\n",
"    try:\n",
"        return rest_client.get_timeseries(entity_id=device.id, keys=keys, start_ts=start_ts, end_ts=end_ts, limit=limit) #entity_type=entity_type, \n",
"    except Exception as e:\n",
"        logging.error(\"Something went wrong in getTelemetry\")\n",
"        logging.error(e)\n",
"        return False\n",
"    "
]
},
{
"cell_type": "code",
"execution_count": 7,
"metadata": {},
"outputs": [],
"source": [
"def getTime(timeRequest):\n",
"    start_ts, end_ts = 0,0\n",
"    if timeRequest[\"type\"] == \"last\":\n",
"        now = dt.now()\n",
"        delta = td(days=timeRequest[\"days\"], seconds=timeRequest[\"seconds\"], microseconds=timeRequest[\"microseconds\"], milliseconds=timeRequest[\"milliseconds\"], minutes=timeRequest[\"minutes\"], hours=timeRequest[\"hours\"], weeks=timeRequest[\"weeks\"])\n",
"        start_ts = str(int(dt.timestamp(now - delta) * 1000))\n",
"        end_ts = str(int(dt.timestamp(now) * 1000))\n",
"    elif timeRequest[\"type\"] == \"midnight-midnight\":\n",
"        timezone = pytz.timezone(timeRequest[\"timezone\"])\n",
"        today = dtf.date.today()\n",
"        yesterday_midnight = dtf.datetime.combine(today - dtf.timedelta(days=1), dtf.time())\n",
"        today_midnight = dtf.datetime.combine(today, dtf.time())\n",
"        yesterday_midnight = timezone.localize(yesterday_midnight)\n",
"        today_midnight = timezone.localize(today_midnight)\n",
"        start_ts = int(yesterday_midnight.timestamp()) * 1000\n",
"        end_ts = int(today_midnight.timestamp()) * 1000\n",
"    elif timeRequest[\"type\"] == \"range\":\n",
"        start_ts = timeRequest[\"ts_start\"]\n",
"        end_ts = timeRequest[\"ts_end\"]\n",
"    return (start_ts, end_ts)"
]
},
{
"cell_type": "code",
"execution_count": 8,
"metadata": {},
"outputs": [],
"source": [
"def getThingsBoardData(url, username, password, targetCustomer, timeRequest):\n",
"    # Creating the REST client object with context manager to get auto token refresh\n",
"    with RestClientCE(base_url=url) as rest_client:\n",
"        try:\n",
"            # Auth with credentials\n",
"            rest_client.login(username=username, password=password)\n",
"            # Get customers > get devices under a target customer > get keys for devices > get data for devices\n",
"            customers = rest_client.get_customers(page_size=\"500\", page=\"0\")\n",
"            devices = getDevices(rest_client=rest_client, customers=customers, target_customer=targetCustomer)\n",
"            telemetry = {}\n",
"            for d in devices.data:\n",
"                #print(d.name)\n",
"                device, keys, err = getDeviceKeys(rest_client=rest_client, devices=devices, target_device=d.name)\n",
"                start_ts, end_ts = getTime(timeRequest)\n",
"                print(len(keys), keys)\n",
"                telemetry[d.name] = getTelemetry(rest_client=rest_client, device=device, keys=','.join(keys), start_ts=start_ts, end_ts=end_ts, limit=50000)\n",
"            return telemetry\n",
"        except ApiException as e:\n",
"            logging.error(e)\n",
"            return False\n"
]
},
{
"cell_type": "code",
"execution_count": 9,
"metadata": {},
"outputs": [],
"source": [
"def getMaxWidth():\n",
"    label_mapping = {\n",
"        \"Lit 116b Level\": \"WASTE TANK 1\",\n",
"        \"Lit 116a Level\": \"WASTE TANK 2\",\n",
"        \"Fit 100 Flow Rate\": \"INLET FLOW RATE\",\n",
"        \"Fit 109b Flow Rate\": \"SALES FLOW RATE\",\n",
"        \"Outlet Turbidity Temp\": \"OUTLET TURBIDITY TEMP\",\n",
"        \"Outlet Orp Temp\": \"OUTLET ORP TEMP\",\n",
"        \"Inlet Turbidity Temp\": \"INLET TURBIDITY TEMP\",\n",
"        \"Inlet Ph Temp\": \"INLET PH TEMP\",\n",
"        \"Ait 102b H2s\": \"INLET H₂S\",\n",
"        \"At 109b H2s\": \"OUTLET H₂S\",\n",
"        \"At 109c Oil In Water\": \"OUTLET OIL IN WATER\",\n",
"        \"Ait 102a Turbitity\": \"INLET TURBIDITY\",\n",
"        \"At 109a Turbidity\": \"OUTLET TURBIDITY\",\n",
"        \"At 109e Orp\": \"OUTLET ORP\"\n",
"    }\n",
"    width = 0\n",
"    for key,value in label_mapping.items():\n",
"        if(len(value) > width):\n",
"            width = len(value)\n",
"\n",
"    return width"
]
},
{
"cell_type": "code",
"execution_count": 10,
"metadata": {},
"outputs": [],
"source": [
"def formatColumnName(telemetryName):\n",
"    name = \" \".join([x.capitalize() for x in telemetryName.split(\"_\")])\n",
"    label_mapping = {\n",
"        \"Lit 116b Level\": \"WASTE TANK 1\",\n",
"        \"Lit 116a Level\": \"WASTE TANK 2\",\n",
"        \"Fit 100 Flow Rate\": \"INLET FLOW RATE\",\n",
"        \"Fit 109b Flow Rate\": \"SALES FLOW RATE\",\n",
"        \"Outlet Turbidity Temp\": \"OUTLET TURBIDITY TEMP\",\n",
"        \"Outlet Orp Temp\": \"OUTLET ORP TEMP\",\n",
"        \"Inlet Turbidity Temp\": \"INLET TURBIDITY TEMP\",\n",
"        \"Inlet Ph Temp\": \"INLET PH TEMP\",\n",
"        \"Ait 102b H2s\": \"INLET H₂S\",\n",
"        \"At 109b H2s\": \"OUTLET H₂S\",\n",
"        \"At 109c Oil In Water\": \"OUTLET DENSITY\",\n",
"        \"Ait 102a Turbitity\": \"INLET TURBIDITY\",\n",
"        \"At 109a Turbidity\": \"OUTLET TURBIDITY\",\n",
"        \"At 109e Orp\": \"OUTLET ORP\",\n",
"        \"Ait 102d Oil In Water\": \"INLET DENSITY\"\n",
"    }\n",
"    return label_mapping.get(name, name)"
]
},
{
"cell_type": "code",
"execution_count": 11,
"metadata": {},
"outputs": [],
"source": [
"def formatChartName(telemetryName):\n",
"    return \" \".join([x.upper() for x in telemetryName.split(\"_\")])"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"def getDataFrame(telemetry, keys, time): \n",
"    df = pd.DataFrame()\n",
"    #for location in telemetry.keys():\n",
"    # Iterate through each datapoint within each location\n",
"    for datapoint in telemetry.keys():\n",
"        # Convert the datapoint list of dictionaries to a DataFrame\n",
"        if datapoint in keys:\n",
"            temp_df = pd.DataFrame(telemetry[datapoint])\n",
"            temp_df['ts'] = pd.to_datetime(temp_df['ts'], unit='ms').dt.tz_localize('UTC').dt.tz_convert(time[\"timezone\"]).dt.tz_localize(None)\n",
"            # Set 'ts' as the index\n",
"            temp_df.set_index('ts', inplace=True)\n",
"            temp_df[\"value\"] = pd.to_numeric(temp_df[\"value\"], errors=\"coerce\")\n",
"            # Rename 'value' column to the name of the datapoint\n",
"            temp_df.rename(columns={'value': formatColumnName(datapoint)}, inplace=True)\n",
"            \n",
"            # Join the temp_df to the main DataFrame\n",
"            df = df.join(temp_df, how='outer')\n",
"    # Forward-fill gaps (assign the result; ffill() is not in-place)\n",
"    df = df.ffill()\n",
"    #df = df.fillna(method='ffill', limit=2)\n",
"    df = df.reindex(sorted(df.columns), axis=1)\n",
"    # Rename index to 'Date'\n",
"    df.rename_axis('Date', inplace=True)\n",
"    return df\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"def getManualDataFrame(telemetry, keys, time): \n",
"    df = pd.DataFrame()\n",
"    for key in keys:\n",
"        if key not in telemetry.keys():\n",
"            # Placeholder row for missing keys; timestamp converted to epoch ms to match the other telemetry\n",
"            telemetry[key] = [{'ts': int(dt.timestamp(dt.now()) * 1000), 'value': '0'}]\n",
"    for datapoint in telemetry.keys():\n",
"        if datapoint in keys:\n",
"            temp_df = pd.DataFrame(telemetry[datapoint])\n",
"            temp_df['ts'] = pd.to_datetime(temp_df['ts'], unit='ms').dt.tz_localize('UTC').dt.tz_convert(time[\"timezone\"]).dt.tz_localize(None)\n",
"            temp_df.set_index('ts', inplace=True)\n",
"            if datapoint in [\"manual_next_pigging_scheduled\"]:\n",
"                temp_df[\"value\"] = pd.to_datetime(temp_df['value'], unit='ms').dt.tz_localize('UTC').dt.tz_convert(time[\"timezone\"]).dt.tz_localize(None)\n",
"                print(temp_df)\n",
"            elif datapoint in [\"manual_equipment_description\",\"manual_issues_concerns\"]:\n",
"                temp_df[\"value\"] = temp_df[\"value\"].astype(str)\n",
"            else:\n",
"                temp_df[\"value\"] = pd.to_numeric(temp_df[\"value\"], errors=\"coerce\")\n",
"            temp_df.rename(columns={'value': formatColumnName(datapoint)}, inplace=True)\n",
"\n",
"            df = df.join(temp_df, how='outer')\n",
"\n",
"    # Take the latest non-null value for each column\n",
"    latest_values = df.apply(lambda x: x.dropna().iloc[-1] if not x.dropna().empty else None)\n",
"\n",
"    # Convert to a single-row DataFrame\n",
"    df = pd.DataFrame([latest_values])\n",
"\n",
"    df = df.reindex(sorted(df.columns), axis=1)\n",
"    df.rename_axis('Date', inplace=True)\n",
"\n",
"    return df\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"def getSampleDataFrame(telemetry, keys, time): \n",
"    df = pd.DataFrame()\n",
"\n",
"    for datapoint in telemetry.keys():\n",
"        if datapoint in keys:\n",
"            temp_df = pd.DataFrame(telemetry[datapoint])\n",
"            temp_df['ts'] = pd.to_datetime(temp_df['ts'], unit='ms').dt.tz_localize('UTC').dt.tz_convert(time[\"timezone\"]).dt.tz_localize(None)\n",
"            temp_df.set_index('ts', inplace=True)\n",
"            if datapoint in [\"manual_sample_time\"]:\n",
"                temp_df[\"value\"] = pd.to_datetime(temp_df['value'], unit='ms').dt.tz_localize('UTC').dt.tz_convert(time[\"timezone\"]).dt.tz_localize(None)\n",
"                print(temp_df)\n",
"            elif datapoint in [\"manual_sample_datapoint\", \"manual_sample_lab\", \"manual_sample_location\"]:\n",
"                temp_df[\"value\"] = temp_df[\"value\"].astype(str)\n",
"            else:\n",
"                temp_df[\"value\"] = pd.to_numeric(temp_df[\"value\"], errors=\"coerce\")\n",
"            temp_df.rename(columns={'value': formatColumnName(datapoint)}, inplace=True)\n",
"\n",
"            df = df.join(temp_df, how='outer')\n",
"\n",
"    df = df.reindex(sorted(df.columns), axis=1)\n",
"    df.rename_axis('Date', inplace=True)\n",
"\n",
"    return df"
]
},
{
"cell_type": "code",
"execution_count": 12,
"metadata": {},
"outputs": [],
"source": [
"def process_dataframe(telemetry, keys, time, special_handling=None, latest_only=False): \n",
"    df = pd.DataFrame()\n",
"\n",
"    # If latest_only is True, ensure missing keys are initialized\n",
"    if latest_only:\n",
"        for key in keys:\n",
"            if key not in telemetry:\n",
"                telemetry[key] = [{'ts': int(dt.timestamp(dt.now()) * 1000), 'value': '0'}]\n",
"\n",
"    for datapoint in telemetry.keys():\n",
"        if datapoint in keys:\n",
"            temp_df = pd.DataFrame(telemetry[datapoint])\n",
"            temp_df['ts'] = pd.to_datetime(temp_df['ts'], unit='ms').dt.tz_localize('UTC').dt.tz_convert(time[\"timezone\"]).dt.tz_localize(None)\n",
"            temp_df.set_index('ts', inplace=True)\n",
"\n",
"            if special_handling and datapoint in special_handling.get(\"datetime\", []):\n",
"                temp_df[\"value\"] = pd.to_datetime(temp_df['value'], unit='ms').dt.tz_localize('UTC').dt.tz_convert(time[\"timezone\"]).dt.tz_localize(None)\n",
"            elif special_handling and datapoint in special_handling.get(\"string\", []):\n",
"                temp_df[\"value\"] = temp_df[\"value\"].astype(str)\n",
"            else:\n",
"                temp_df[\"value\"] = pd.to_numeric(temp_df[\"value\"], errors=\"coerce\")\n",
"\n",
"            temp_df.rename(columns={'value': formatColumnName(datapoint)}, inplace=True)\n",
"            df = df.join(temp_df, how='outer')\n",
"\n",
"    if latest_only:\n",
"        latest_values = df.apply(lambda x: x.dropna().iloc[-1] if not x.dropna().empty else None)\n",
"        df = pd.DataFrame([latest_values])\n",
"\n",
"    df = df.reindex(sorted(df.columns), axis=1)\n",
"    df.rename_axis('Date', inplace=True)\n",
"\n",
"    return df\n",
"\n",
"# Usage\n",
"def getDataFrame(telemetry, keys, time):\n",
"    return process_dataframe(telemetry, keys, time)\n",
"\n",
"def getManualDataFrame(telemetry, keys, time):\n",
"    return process_dataframe(\n",
"        telemetry, keys, time, \n",
"        special_handling={\n",
"            \"datetime\": [\"manual_next_pigging_scheduled\"],\n",
"            \"string\": [\"manual_equipment_description\", \"manual_issues_concerns\"]\n",
"        }, \n",
"        latest_only=True\n",
"    )\n",
"\n",
"def getSampleDataFrame(telemetry, keys, time):\n",
"    return process_dataframe(\n",
"        telemetry, keys, time, \n",
"        special_handling={\n",
"            \"datetime\": [\"manual_sample_time\"],\n",
"            \"string\": [\"manual_sample_datapoint\", \"manual_sample_lab\", \"manual_sample_location\"]\n",
"        }\n",
"    )\n"
]
},
{
"cell_type": "code",
"execution_count": 13,
"metadata": {},
"outputs": [],
"source": [
"def get_last_data_row(ws):\n",
"    # Start from the bottom row and work up to find the last row with data\n",
"    for row in range(ws.max_row, 0, -1):\n",
"        if any(cell.value is not None for cell in ws[row]):\n",
"            return row\n",
"    return 0 # If no data is found, return 0"
]
},
{
"cell_type": "code",
"execution_count": 16,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"261 ['latitude', 'longitude', 'speed', 'a_current', 'b_current', 'c_current', 'scada_stop_cmd', 'pit_100a_pressure', 'pit_101a_pressure', 'pit_101b_pressure', 'pit_101c_pressure', 'fit_101_flow_rate', 'fi_101b_popoff', 'fcv_101a_valve', 'fcv_101b_valve', 'pit_102_pressure', 'pit_102_hi_alm', 'pit_102_hihi_alm', 'pit_102_hi_spt', 'pit_102_hihi_spt', 'p200_hand', 'p200_auto', 'xy_200_run', 'ct_200_run', 'pit_100_pressure', 'm106a_vfd_active', 'm106a_vfd_faulted', 'm106a_vfd_frequency', 'm106a_vfd_start', 'm106a_vfd_stop', 'pit_106a_pressure', 'fit_106a_flow_rate', 'm106b_vfd_active', 'm106b_vfd_faulted', 'm106b_vfd_frequency', 'm106b_vfd_start', 'm106b_vfd_stop', 'pit_106b_pressure', 'fit_106b_flow_rate', 'pit_106c_pressure', 'pit_106d_pressure', 'sdv106_open', 'sdv106_closed', 'bp_3a_auto', 'bp_3a_hand', 'bp_3a_run_cmd', 'bp_3a_run', 'bp_3a_fault', 'bp_3b_auto', 'bp_3b_hand', 'bp_3b_run_cmd', 'bp_3b_run', 'bp_3b_fault', 'pit_107a_pressure', 'fit_107a_flow_rate', 'pit_107b_pressure', 'fcv_001_valve', 'fit_107b_flow_rate', 'pit_107d_pressure', 'fcv_002_valve', 'pit_107c_pressure', 'pit_108a_pressure', 'pit_108b_pressure', 'dpi_108a_pressure', 'pit_108c_pressure', 'pit_108d_pressure', 'pdt_108b_pressure', 'pit_108e_pressure', 'pit_108f_pressure', 'pdt_108c_pressure', 'pit_108_pressure', 'pdt_108a_hi_alm', 'pdt_108a_hihi_alm', 'pdt_108b_hi_alm', 'pdt_108b_hihi_alm', 'pdt_108c_hi_alm', 'pdt_108c_hihi_alm', 'ait_102a_turbitity', 'ait_102b_h2s', 'ait_102c_ph', 'ait_102d_oil_in_water', 'fit_102_flow_rate', 'lit_112a_h2o2_level', 'lit_112b_nahso3_level', 'fis_112_h2o2_popoff', 'fit_112a_h2o2_flow_rate', 'fit_112b_nahso3_flow_rate', 'at_109a_turbidity', 'at_109b_h2s', 'at_109c_oil_in_water', 'at_109d_o2_in_water', 'at_109e_orp', 'fit_109a_flow_rate', 'fit_100_flow_rate', 'fit_100_hi_alm', 'fit_100_hihi_alm', 'fit_100_lo_alm', 'fit_111_flow_rate', 'pit_110_pressure', 'lit_170_level', 'lit_200_level', 'lit_101_level', 'li_103D_level_alm', 'lsh_120_hihi_alm', 'pit_050_pressure', 'pit_065_pressure', 'pdi_065_pressure', 'fit_104_n2_rate', 'p100_auto', 'p100_hand', 'sales_recirculate_sw', 'fit_109b_flow_rate', 'pit_111a_n2', 'pit_111b_n2', 'pit_111c_n2', 'ct_200_current', 'sdv_101a', 'xy_100_run', 'skim_total_barrels', 'dpi_108b_pressure', 'chemical_pump_01_run_status', 'chemical_pump_01_rate_offset', 'spt_pid_h2o2_chemical_rate', 'spt_chemical_manual_rate', 'chemical_pump_auto', 'esd_exists', 'n2_purity', 'n2_outlet_flow_rate', 'n2_outlet_temp', 'n2_inlet_pressure', 'compressor_controller_temp', 'compressor_ambient_temp', 'compressor_outlet_temp', 'compressor_outlet_pressure', 'n2_outlet_pressure', 'fit_109b_water_job', 'fit_109b_water_last_month', 'fit_109b_water_month', 'fit_109b_water_lifetime', 'fit_109b_water_today', 'fit_109b_water_yesterday', 'fit_100_water_job', 'fit_100_water_last_month', 'fit_100_water_month', 'fit_100_water_lifetime', 'fit_100_water_today', 'fit_100_water_yesterday', 'h2o2_chemical_rate', 'rmt_sd_alm', 'pnl_esd_alm', 'pit_111c_hihi_alm', 'pit_111b_hihi_alm', 'pit_111a_hihi_alm', 'pit_110_hihi_alm', 'pit_108g_hihi_alm', 'pit_108c_hihi_alm', 'pit_108b_hihi_alm', 'pit_108a_hihi_alm', 'pit_107b_lolo_alm', 'pit_107a_lolo_alm', 'pit_106b_hihi_alm', 'pit_106a_hihi_alm', 'pit_101b_transmitter_alm', 'pit_101b_hihi_alm', 'pit_101a_transmitter_alm', 'pit_101a_hihi_alm', 'pit_101a_hi_alm', 'pit_100_hihi_alm', 'pit_065_hihi_alm', 'pit_050_hihi_alm', 'pdi_065_lolo_alm', 'pdi_065_lo_alm', 'pdi_065_hihi_alm', 'm106b_vfd_faulted_alm', 'm106a_vfd_faulted_alm', 'lit_200_hihi_alm', 
'lit_170_hihi_alm', 'fit_107b_lolo_alm', 'fit_107a_lolo_alm', 'fit_106b_hihi_alm', 'fit_106a_hihi_alm', 'fit_004_hihi_alm', 'bp_3b_run_fail_alm', 'bp_3a_run_fail_alm', 'ait_114c_hihi_alm', 'ait_114b_hihi_alm', 'ait_114a_hihi_alm', 'ac_volt', 'bc_volt', 'ab_volt', 'psd_alm', 'ait_114a_lolo_alm', 'ait_114a_lo_alm', 'ait_114r_lolo_alm', 'ait_114r_lo_alm', 'ait_114z_lo_alm', 'ait_114z_lolo_alm', 'ait_114x_lo_alm', 'ait_114x_lolo_alm', 'ait_114c_lolo_alm', 'ait_114c_lo_alm', 'ait_114l_lolo_alm', 'ait_114l_lo_alm', 'lit_116b_level', 'lit_116b_hihi_alm', 'lit_116b_hi_alm', 'lit_116a_level', 'lit_116a_hihi_alm', 'lit_116a_hi_alm', 'outlet_turbidity_temp', 'outlet_orp_temp', 'inlet_turbidity_temp', 'inlet_ph_temp', 'n2_run_time_lifetime', 'compressor_lifetime_run_hours', 'ef_vfd_1_fault_description', 'ef_vfd_1_n2_frequency', 'ef_vfd_2_running', 'ef_vfd_2_n2_hand_spt', 'ef_vfd_2_n2_frequency', 'ef_vfd_2_n2_faulted_alm', 'ef_vfd_2_n2_auto_room_spt', 'ef_vfd_2_n2_auto', 'ef_vfd_2_fault_description', 'ef_vfd_1_running', 'ef_vfd_1_n2_hand_spt', 'ef_vfd_1_n2_faulted_alm', 'ef_vfd_1_n2_auto_room_spt', 'ef_vfd_1_n2_auto', 'n2_inlet_dew_point', 'manual_water_to_tanks_time', 'manual_sample_time', 'manual_sample_value', 'manual_sample_lab', 'manual_sample_datapoint', 'manual_sample_location', 'manual_equipment_description', 'manual_water_events', 'manual_diverted_water_time', 'manual_standby_time', 'manual_equipment_time', 'manual_unit_uptime', 'manual_water_events_time', 'manual_clean_water_sold_per_job', 'manual_skim_oil_discharged_per_job', 'manual_h202_on_hand', 'manual_coagulant_on_hand', 'manual_upright_tank_issues', 'manual_vac_truck_batches', 'manual_cartridge_filter_changes', 'manual_bag_filter_changes', 'outlet_ph', 'lit_110a_level', 'fit_106b_yesterday', 'fit_106b_today', 'fit_106b_this_month', 'fit_106b_lifetime', 'fit_106b_last_month', 'fit_106b_job', 'fcv_101a_position', 'outlet_o2']\n"
]
}
],
"source": [
"\"\"\"time = {\n",
"    \"type\": \"last\",\n",
"    \"days\":1,\n",
"    \"seconds\":0,\n",
"    \"microseconds\":0,\n",
"    \"milliseconds\":0,\n",
"    \"minutes\":0,\n",
"    \"hours\":0,\n",
"    \"weeks\":0,\n",
"    \"timezone\": \"US/Alaska\"\n",
"    }\n",
"    \"\"\"\n",
"time = {\n",
"    \"type\": \"midnight-midnight\",\n",
"    \"timezone\": \"US/Alaska\" \n",
"}\n",
"\"\"\"\n",
"    time = {\n",
"    \"type\": \"range\",\n",
"    \"timezone\": \"US/Alaska\" ,\n",
"    \"ts_start\": 1728115200000,\n",
"    \"ts_end\": 1728201600000\n",
"} \"\"\"\n",
"telemetry = getThingsBoardData(url, username, password, \"Thunderbird Field Services\", time)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"len(telemetry[\"ACW #1\"].keys())"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"telemetry['ACW #1']['manual_clean_water_sold_per_job']"
]
},
{
"cell_type": "code",
"execution_count": 18,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"/var/folders/dd/glmkqm595_n53prmxzd7qh980000gn/T/ipykernel_98236/3556243203.py:17: FutureWarning: The behavior of 'to_datetime' with 'unit' when parsing strings is deprecated. In a future version, strings will be parsed as datetime strings, matching the behavior without a 'unit'. To retain the old behavior, explicitly cast ints or floats to numeric type before calling to_datetime.\n",
" temp_df[\"value\"] = pd.to_datetime(temp_df['value'], unit='ms').dt.tz_localize('UTC').dt.tz_convert(time[\"timezone\"]).dt.tz_localize(None)\n",
"/var/folders/dd/glmkqm595_n53prmxzd7qh980000gn/T/ipykernel_98236/3556243203.py:17: FutureWarning: The behavior of 'to_datetime' with 'unit' when parsing strings is deprecated. In a future version, strings will be parsed as datetime strings, matching the behavior without a 'unit'. To retain the old behavior, explicitly cast ints or floats to numeric type before calling to_datetime.\n",
" temp_df[\"value\"] = pd.to_datetime(temp_df['value'], unit='ms').dt.tz_localize('UTC').dt.tz_convert(time[\"timezone\"]).dt.tz_localize(None)\n"
]
}
],
"source": [
"# Create a Pandas Excel writer using openpyxl as the engine.\n",
"shutil.copyfile('/Users/nico/Documents/GitHub/ThingsBoard/EKKO Reports/thunderbirdfs-daily-report/thunderbirdfsreport/ACW Daily Report Template.xlsx', f\"/Users/nico/Documents/test/Thunderbird_{dt.today().strftime('%Y-%m-%d')}.xlsx\")\n",
"writer = pd.ExcelWriter(\n",
"    f\"/Users/nico/Documents/test/Thunderbird_{dt.today().strftime('%Y-%m-%d')}.xlsx\", \n",
"    engine=\"openpyxl\",\n",
"    datetime_format=\"yyyy-mm-dd hh:mm:ss\",\n",
"    date_format=\"yyyy-mm-dd\",\n",
"    #engine_kwargs={'options': {'strings_to_numbers': True}},\n",
"    mode=\"a\",\n",
"    if_sheet_exists=\"overlay\")\n",
"reportsheet = writer.book.worksheets[0]\n",
"\n",
"keys = ['ait_102a_turbitity','ait_102b_h2s', 'at_109a_turbidity', 'at_109b_h2s', 'at_109c_oil_in_water', 'at_109e_orp', 'fit_100_flow_rate', 'fit_109b_flow_rate', 'lit_116b_level', 'lit_116a_level', 'outlet_turbidity_temp', 'outlet_orp_temp', 'inlet_turbidity_temp', 'inlet_ph_temp', 'ait_102d_oil_in_water', 'outlet_ph']\n",
"manual_keys = ['manual_bag_filter_changes', 'manual_cartridge_filter_changes', 'manual_clean_water_sold_per_job', 'manual_coagulant_on_hand', 'manual_diverted_water_time', 'manual_equipment_description', 'manual_equipment_time', 'manual_h202_on_hand', 'manual_issues_concerns', 'manual_next_pigging_scheduled', 'manual_skim_oil_discharged_per_job', 'manual_standby_time', 'manual_unit_uptime', 'manual_upright_tank_issues', 'manual_vac_truck_batches', 'manual_water_events', 'manual_water_events_time', 'manual_water_to_tanks_time']\n",
"sample_keys = ['manual_sample_datapoint', 'manual_sample_lab', 'manual_sample_location', 'manual_sample_time', 'manual_sample_value'] \n",
"#Create a Sheet for each Device\n",
|
|
"for device in telemetry.keys():\n",
|
|
" df = getDataFrame(telemetry[device], keys, time)\n",
|
|
" dfm = getManualDataFrame(telemetry[device], manual_keys, time)\n",
|
|
" dfs = getSampleDataFrame(telemetry[device], sample_keys, time)\n",
|
|
" # Write the dataframe data to XlsxWriter. Turn off the default header and\n",
|
|
" # index and skip one row to allow us to insert a user defined header.\n",
|
|
" df.to_excel(writer, sheet_name=device, startrow=0, header=True, index=True, float_format=\"%.2f\")\n",
|
|
" dfm.to_excel(writer, sheet_name=device+\" Manual Entry\", startrow=0, header=True, index=True, float_format=\"%.2f\")\n",
|
|
" dfs.to_excel(writer, sheet_name=device+\" Manual Samples\", startrow=0, header=True, index=True, float_format=\"%.2f\")\n",
|
|
" # Get the xlsxwriter workbook and worksheet objects.\n",
|
|
" workbook = writer.book\n",
|
|
" worksheet = writer.sheets[device]\n",
|
|
" for row in worksheet.iter_rows(min_row=2, max_col=1):\n",
|
|
" for cell in row:\n",
|
|
" cell.number_format = 'yyyy-mm-dd hh:mm:ss'\n",
|
|
"\n",
|
|
"#Getting the data sheet for ACW #1 to access date range actually available\n",
|
|
"datasheet = writer.book.worksheets[1]\n",
|
|
"datetime_min = datasheet[\"A2\"].value\n",
|
|
"last_data_row = get_last_data_row(datasheet)\n",
|
|
"datetime_max = datasheet[f\"A{last_data_row}\"].value\n",
|
|
"#Convert to excel number\n",
|
|
"datetime_min = to_excel(datetime_min)\n",
|
|
"datetime_max = round(to_excel(datetime_max))\n",
|
|
"for chart in reportsheet._charts:\n",
|
|
" #Change the range of the chart\n",
|
|
" #chart = reportsheet._charts[0]\n",
|
|
" chart.x_axis.scaling.min = datetime_min\n",
|
|
" chart.x_axis.scaling.max = datetime_max\n",
|
|
" chart.x_axis.number_format = 'hh:mm'\n",
|
|
"\n",
|
|
"reportsheet[\"B4\"].value = dt.fromtimestamp(getTime(time)[0]/1000).strftime('%m/%d/%Y')\n",
|
|
"\"\"\"\n",
|
|
"reportsheet[\"B5\"] = \"Test Well Name\"\n",
|
|
"\"\"\"\n",
|
|
"# Close the Pandas Excel writer and output the Excel file.\n",
|
|
"writer.close()\n"
|
|
]
|
|
},
|
|
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"dfm"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Create an AWS SES client\n",
"ses_client = boto3.client('ses', region_name='us-east-1')"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Create an email message\n",
"emails = [\"nmelone@henry-pump.com\"]\n",
"\"\"\"emails = [\n",
"    \"dvaught@thunderbirdfs.com\", \n",
"    \"rkamper@thunderbirdfs.com\", \n",
"    \"john.griffin@acaciaes.com\", \n",
"    \"Joshua.Fine@fineelectricalservices2018.com\"\n",
"]\"\"\"\n",
"msg = MIMEMultipart()\n",
"msg['Subject'] = \"Thunderbird Field Services\"\n",
"msg['From'] = 'alerts@henry-pump.com'\n",
"msg['To'] = \", \".join(emails)\n",
"\n",
"# Add a text body to the message (optional)\n",
"body_text = 'Please find the attached spreadsheets.'\n",
"msg.attach(MIMEText(body_text, 'plain'))\n",
"\n",
"\n",
"# Attach the file to the email message\n",
"attachment = MIMEBase('application', 'octet-stream')\n",
"attachment.set_payload(open(f\"/tmp/Thunderbird_{dt.today().strftime('%Y-%m-%d')}.xlsx\", \"rb\").read())\n",
"encoders.encode_base64(attachment)\n",
"attachment.add_header('Content-Disposition', 'attachment', filename=f\"Thunderbird_{dt.today().strftime('%Y-%m-%d')}.xlsx\")\n",
"msg.attach(attachment)\n",
"\n",
"# Send the email using AWS SES\n",
"response = ses_client.send_raw_email(\n",
"    \n",
"    RawMessage={'Data': msg.as_string()}\n",
")"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "tbreport",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.13.1"
},
"orig_nbformat": 4
},
"nbformat": 4,
"nbformat_minor": 2
}