Added report generator for ThingsBoard
114 -c7b4d1e.ipynb Normal file
File diff suppressed because one or more lines are too long
1 .gitignore vendored
@@ -2,5 +2,6 @@
.DS_Store
tb_report/frontend/node_modules/**
meshifyDrivers/.DS_Store
Report Generator/lambda-python3.12/.aws-sam
.DS_Store
.DS_Store
BIN AWS Lambda Layer/.DS_Store vendored
Binary file not shown.
197 Code Snippets/addNewUsers.ipynb Normal file
@@ -0,0 +1,197 @@
{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {},
   "outputs": [],
   "source": [
    "from tb_rest_client.rest_client_pe import *\n",
    "from tb_rest_client.rest import ApiException\n",
    "from tb_rest_client.api_client import *\n",
    "import re, ast, json\n",
    "from uuid import uuid4"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {},
   "outputs": [],
   "source": [
    "url = \"https://www.enxlekkocloud.com\"\n",
    "username = \"nico.a.melone@gmail.com\"\n",
    "password = \"9EE#mqb*b6bXV9hJrPYGm&w3q5Y@3acumvvb5isQ\"\n",
    "userIdToCopy = \"\"\n",
    "entity_group_id=\"616d62f0-3300-11ef-9c57-29fbfd438c8b\"\n",
    "default_dashboard = \"c157d8a0-32f9-11ef-9c57-29fbfd438c8b\""
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "metadata": {},
   "outputs": [],
   "source": [
    "data = [\n",
    "    \"Michael Montgomery - mmontgomery@apollopetro.com\",\n",
    "    \"Jerry Pourciau - jpourciau@apollopetro.com\",\n",
    "    \"Dimitri Menutis - dmenutis@apollopetro.com\",\n",
    "    \"Chris Jean - cjean@apollopetro.com\",\n",
    "    \"Josh Spence - jspence@apollopetro.com\"\n",
    "]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "metadata": {},
   "outputs": [],
   "source": [
    "def checkUserExists(userEmail, rest_client):\n",
    "    resp = rest_client.get_user_users(page_size=100, page=0, text_search=userEmail)\n",
    "    resp = resp.to_dict()\n",
    "    if resp[\"total_elements\"] > 0:\n",
    "        return True\n",
    "    return False"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "metadata": {},
   "outputs": [],
   "source": [
    "def copyUser(userIdToCopy, rest_client):\n",
    "    resp = rest_client.get_user_by_id(userIdToCopy)\n",
    "    resp = resp.to_dict()\n",
    "    del resp[\"id\"]\n",
    "    del resp[\"tenant_id\"]\n",
    "    del resp[\"created_time\"]\n",
    "    return resp\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "metadata": {},
   "outputs": [],
   "source": [
    "def process_data(data, entity_group_id, default_dashboard):\n",
    "    result = []\n",
    "    for item in data:\n",
    "        parts = item.split(' - ')\n",
    "        first_last_name = parts[0].split()\n",
    "        first_name = first_last_name[0]\n",
    "        last_name = ' '.join(first_last_name[1:])\n",
    "        email = parts[1]\n",
    "        phone = ''\n",
    "        if len(parts) > 2:\n",
    "            phone = '+' + re.sub(r'\\D', '', parts[2])\n",
    "\n",
    "        owner_id = {\n",
    "            \"id\": entity_group_id,\n",
    "            \"entity_type\": \"CUSTOMER\"\n",
    "        }\n",
    "        if default_dashboard:\n",
    "            additionalInfo = {\n",
    "                \"description\": \"\",\n",
    "                \"defaultDashboardId\": default_dashboard,\n",
    "                \"defaultDashboardFullscreen\": False,\n",
    "                \"homeDashboardId\": default_dashboard,\n",
    "                \"homeDashboardHideToolbar\": False,\n",
    "                \"userCredentialsEnabled\": True\n",
    "            }\n",
    "        else:\n",
    "            additionalInfo = {}\n",
    "\n",
    "        result.append({\n",
    "            \"email\": email,\n",
    "            \"authority\": \"CUSTOMER_USER\",\n",
    "            \"firstName\": first_name,\n",
    "            \"lastName\": last_name,\n",
    "            \"phone\": phone,\n",
    "            \"additionalInfo\": additionalInfo,\n",
    "            \"ownerId\": owner_id\n",
    "        })\n",
    "\n",
    "    return result"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "metadata": {},
   "outputs": [],
   "source": [
    "filtered_users = process_data(data, entity_group_id=entity_group_id, default_dashboard=default_dashboard)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 16,
   "metadata": {},
   "outputs": [],
   "source": [
    "with RestClientPE(base_url=url) as rest_client:\n",
    "    try:\n",
    "        rest_client.login(username=username, password=password)\n",
    "        if userIdToCopy:\n",
    "            templateUser = copyUser(userIdToCopy=userIdToCopy, rest_client=rest_client)\n",
    "            templateUser[\"additionalInfo\"] = ast.literal_eval(templateUser['additional_info'].replace(\"'\", '\"'))\n",
    "            del templateUser[\"additional_info\"]\n",
    "        else:\n",
    "            templateUser = {\n",
    "                \"email\": \"user@example.com\",\n",
    "                \"authority\": \"CUSTOMER_USER\",\n",
    "                \"firstName\": \"John\",\n",
    "                \"lastName\": \"Doe\",\n",
    "                \"phone\": \"38012345123\",\n",
    "                \"additionalInfo\": {},\n",
    "                \"ownerId\": {\n",
    "                    \"id\": \"efe3a0d0-bb6b-11ec-9326-ad8278896f52\",\n",
    "                    \"entityType\": \"CUSTOMER\"\n",
    "                }\n",
    "            }\n",
    "        for user in filtered_users:\n",
    "            if not checkUserExists(user[\"email\"], rest_client=rest_client):\n",
    "\n",
    "                \"\"\"\n",
    "                templateUser[\"email\"] = user[\"email\"]\n",
    "                templateUser[\"name\"] = user[\"email\"]\n",
    "                templateUser[\"firstName\"] = user[\"information\"][\"first\"]\n",
    "                templateUser[\"lastName\"] = user[\"information\"][\"last\"]\n",
    "                if user[\"phone\"]:\n",
    "                    templateUser[\"phone\"] = \"+1\" + \"\".join(user[\"phone\"].split(\"-\"))\n",
    "                else:\n",
    "                    templateUser[\"phone\"] = \"\"\n",
    "                \"\"\"\n",
    "                #print(json.dumps(user, indent=4))\n",
    "                rest_client.save_user(send_activation_mail=True, body=user, entity_group_id=entity_group_id)\n",
    "    except ApiException as e:\n",
    "        print(e)"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "thingsboard",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.10.5"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
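For reference, `process_data` above also accepts an optional third ` - phone` segment per entry and normalizes it to a `+`-prefixed digit string. A minimal sketch of the expected input and output shape, assuming the notebook's functions and IDs as defined above (the name and number here are hypothetical):

    # hypothetical entry that includes the optional phone segment
    data = ["Jane Smith - jsmith@example.com - 432-555-0101"]
    users = process_data(data, entity_group_id=entity_group_id, default_dashboard=default_dashboard)
    # users[0]["phone"] == "+4325550101"; first/last name and email are split out of the string
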
91 Code Snippets/add_alarms.py Normal file
@@ -0,0 +1,91 @@
import json, uuid

# Load the existing JSON data from the file
path = "ek_chemical.json"
with open(path, 'r') as f:
    data = json.load(f)

# Define the new keys to add as alarms
new_keys = {
    "vfd_01_faulted_alm": {"severity": "CRITICAL", "alarmType": "VFD 01 Faulted Alarm"},
    "vfd_01_status_alm": {"severity": "CRITICAL", "alarmType": "VFD 01 Status Alarm"},
    "vfd_01_undervoltage_alm": {"severity": "CRITICAL", "alarmType": "VFD 01 Undervoltage Alarm"},
    "vfd_02_faulted_alm": {"severity": "CRITICAL", "alarmType": "VFD 02 Faulted Alarm"},
    "vfd_02_status_alm": {"severity": "CRITICAL", "alarmType": "VFD 02 Status Alarm"},
    "vfd_02_undervoltage_alm": {"severity": "CRITICAL", "alarmType": "VFD 02 Undervoltage Alarm"}
}

def checkDuplicates(key, alarms):
    if alarms:
        for alarm in alarms:
            if alarm["clearRule"]["condition"]["condition"][0]["key"]["key"] == key:
                return False
    return True

# Loop through the new keys and create a new alarm based on the existing ones
for key, value in new_keys.items():
    if checkDuplicates(key, data["profileData"]["alarms"]):
        id = str(uuid.uuid4())
        createRules = {
            value["severity"]: {
                "condition": {
                    "condition": [
                        {
                            "key": {"type": "TIME_SERIES", "key": key},
                            "valueType": "BOOLEAN",
                            "value": None,
                            "predicate": {"type": "BOOLEAN", "operation": "EQUAL", "value": {"defaultValue": True, "userValue": None, "dynamicValue": None}}
                        }
                    ],
                    "spec": {"type": "SIMPLE"}
                },
                "schedule": None,
                "alarmDetails": "",
                "dashboardId": None
            }
        }
        clearRule = {
            "condition": {
                "condition": [
                    {
                        "key": {"type": "TIME_SERIES", "key": key},
                        "valueType": "BOOLEAN",
                        "value": None,
                        "predicate": {"type": "BOOLEAN", "operation": "EQUAL", "value": {"defaultValue": False, "userValue": None, "dynamicValue": None}}
                    }
                ],
                "spec": {"type": "DURATION", "unit": "MINUTES", "predicate": {"defaultValue": 30, "userValue": None, "dynamicValue": None}}
            },
            "schedule": None,
            "alarmDetails": None,
            "dashboardId": None
        }
        propagate = False
        propagateToOwner = False
        propagateToOwnerHierarchy = False
        propagateToTenant = False
        propagateRelationTypes = None

        # Create a new alarm with the updated createRules and clearRule
        new_alarm = {
            "id": id,
            "alarmType": value["alarmType"],
            "createRules": createRules,
            "clearRule": clearRule,
            "propagate": propagate,
            "propagateToOwner": propagateToOwner,
            "propagateToOwnerHierarchy": propagateToOwnerHierarchy,
            "propagateToTenant": propagateToTenant,
            "propagateRelationTypes": propagateRelationTypes
        }

        # Add the new alarm to the existing alarms array
        if not data["profileData"]["alarms"]:
            data["profileData"]["alarms"] = []
        data["profileData"]["alarms"].append(new_alarm)
        print(f"Added {value['severity']} alarm for {key}")
    else:
        print(f"Skipped {key}")

# Save the updated JSON data back to the file
with open(path, 'w') as f:
    json.dump(data, f, indent=4)
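Because the script rewrites ek_chemical.json in place, a quick round-trip check can confirm the profile still parses and that each key landed exactly once. A minimal sketch, assuming the file layout used above:

    import json

    with open("ek_chemical.json") as f:
        profile = json.load(f)
    # collect the clear-rule key of every alarm and assert there are no duplicates
    keys = [a["clearRule"]["condition"]["condition"][0]["key"]["key"]
            for a in profile["profileData"]["alarms"]]
    assert len(keys) == len(set(keys)), "duplicate alarm keys found"
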
4458 Code Snippets/alarm_data.json Normal file
File diff suppressed because it is too large
37 Code Snippets/csv-to-kmz.py Normal file
@@ -0,0 +1,37 @@
import csv
import simplekml

# Create a new KML document
kml = simplekml.Kml()

# Open the CSV file and read its contents
with open('inputcoords.csv', 'r') as csvfile:
    reader = csv.reader(csvfile, delimiter=';')
    next(reader)  # Skip the header row

    # Loop through each device in the CSV file
    for row in reader:
        name = row[0]
        lat_tel = float(row[1]) if row[1] else None
        lon_tel = float(row[2]) if row[2] else None
        lat_att = float(row[3]) if row[3] else None
        lon_att = float(row[4]) if row[4] else None

        # Ignore gateways and camera trailers
        if "Gateway" in name or "Camera Trailer" in name:
            continue

        # Prefer latitude and longitude from "tel" columns if available
        lat = lat_tel or lat_att
        lon = lon_tel or lon_att

        # If no coordinates are available, print a warning message
        if not (lat and lon):
            print(f"No coordinates for device: {name}")
            continue

        # Create a new placemark for the device
        pnt = kml.newpoint(name=name, coords=[(lon, lat)])

# Save the KML document to a file (KMZ file)
kml.savekmz("devices_tb.kmz")
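One caveat in the snippet above: `lat = lat_tel or lat_att` treats a legitimate coordinate of 0.0 as missing, because `or` tests truthiness rather than None. Zero coordinates don't occur in this dataset, but an explicit None check is the safer pattern:

    # prefer the "tel" columns, falling back to "att", without dropping 0.0
    lat = lat_tel if lat_tel is not None else lat_att
    lon = lon_tel if lon_tel is not None else lon_att
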
BIN Code Snippets/devices_tb.kmz Normal file
Binary file not shown.
483 Code Snippets/ek_chemical.json Normal file
@@ -0,0 +1,483 @@
{
    "name": "ek_chemical",
    "description": "",
    "image": null,
    "type": "DEFAULT",
    "transportType": "DEFAULT",
    "provisionType": "DISABLED",
    "defaultRuleChainId": {"entityType": "RULE_CHAIN", "id": "f773e6b0-fcf1-11ee-bef7-5131a0fcf1e6"},
    "defaultDashboardId": null,
    "defaultQueueName": "Main",
    "profileData": {
        "configuration": {"type": "DEFAULT"},
        "transportConfiguration": {"type": "DEFAULT"},
        "provisionConfiguration": {"type": "DISABLED", "provisionDeviceSecret": null},
        "alarms": [
            {
                "id": "b630bd0c-7bd1-429b-ae11-b5d62317b22c",
                "alarmType": "VFD 01 Faulted Alarm",
                "createRules": {"CRITICAL": {
                    "condition": {
                        "condition": [{"key": {"type": "TIME_SERIES", "key": "vfd_01_faulted_alm"}, "valueType": "BOOLEAN", "value": null, "predicate": {"type": "BOOLEAN", "operation": "EQUAL", "value": {"defaultValue": true, "userValue": null, "dynamicValue": null}}}],
                        "spec": {"type": "SIMPLE"}
                    },
                    "schedule": null, "alarmDetails": "", "dashboardId": null
                }},
                "clearRule": {
                    "condition": {
                        "condition": [{"key": {"type": "TIME_SERIES", "key": "vfd_01_faulted_alm"}, "valueType": "BOOLEAN", "value": null, "predicate": {"type": "BOOLEAN", "operation": "EQUAL", "value": {"defaultValue": false, "userValue": null, "dynamicValue": null}}}],
                        "spec": {"type": "DURATION", "unit": "MINUTES", "predicate": {"defaultValue": 30, "userValue": null, "dynamicValue": null}}
                    },
                    "schedule": null, "alarmDetails": null, "dashboardId": null
                },
                "propagate": false,
                "propagateToOwner": false,
                "propagateToOwnerHierarchy": false,
                "propagateToTenant": false,
                "propagateRelationTypes": null
            },
            {
                "id": "c1af9cd2-b66a-42a2-8927-3e3e632202d3",
                "alarmType": "VFD 01 Status Alarm",
                "createRules": {"CRITICAL": {
                    "condition": {
                        "condition": [{"key": {"type": "TIME_SERIES", "key": "vfd_01_status_alm"}, "valueType": "BOOLEAN", "value": null, "predicate": {"type": "BOOLEAN", "operation": "EQUAL", "value": {"defaultValue": true, "userValue": null, "dynamicValue": null}}}],
                        "spec": {"type": "SIMPLE"}
                    },
                    "schedule": null, "alarmDetails": "", "dashboardId": null
                }},
                "clearRule": {
                    "condition": {
                        "condition": [{"key": {"type": "TIME_SERIES", "key": "vfd_01_status_alm"}, "valueType": "BOOLEAN", "value": null, "predicate": {"type": "BOOLEAN", "operation": "EQUAL", "value": {"defaultValue": false, "userValue": null, "dynamicValue": null}}}],
                        "spec": {"type": "DURATION", "unit": "MINUTES", "predicate": {"defaultValue": 30, "userValue": null, "dynamicValue": null}}
                    },
                    "schedule": null, "alarmDetails": null, "dashboardId": null
                },
                "propagate": false,
                "propagateToOwner": false,
                "propagateToOwnerHierarchy": false,
                "propagateToTenant": false,
                "propagateRelationTypes": null
            },
            {
                "id": "02423836-58c6-4a98-83dd-f54a32b9a03d",
                "alarmType": "VFD 01 Undervoltage Alarm",
                "createRules": {"CRITICAL": {
                    "condition": {
                        "condition": [{"key": {"type": "TIME_SERIES", "key": "vfd_01_undervoltage_alm"}, "valueType": "BOOLEAN", "value": null, "predicate": {"type": "BOOLEAN", "operation": "EQUAL", "value": {"defaultValue": true, "userValue": null, "dynamicValue": null}}}],
                        "spec": {"type": "SIMPLE"}
                    },
                    "schedule": null, "alarmDetails": "", "dashboardId": null
                }},
                "clearRule": {
                    "condition": {
                        "condition": [{"key": {"type": "TIME_SERIES", "key": "vfd_01_undervoltage_alm"}, "valueType": "BOOLEAN", "value": null, "predicate": {"type": "BOOLEAN", "operation": "EQUAL", "value": {"defaultValue": false, "userValue": null, "dynamicValue": null}}}],
                        "spec": {"type": "DURATION", "unit": "MINUTES", "predicate": {"defaultValue": 30, "userValue": null, "dynamicValue": null}}
                    },
                    "schedule": null, "alarmDetails": null, "dashboardId": null
                },
                "propagate": false,
                "propagateToOwner": false,
                "propagateToOwnerHierarchy": false,
                "propagateToTenant": false,
                "propagateRelationTypes": null
            },
            {
                "id": "badde6a4-8aa2-45b4-9b45-25b47f6fe817",
                "alarmType": "VFD 02 Faulted Alarm",
                "createRules": {"CRITICAL": {
                    "condition": {
                        "condition": [{"key": {"type": "TIME_SERIES", "key": "vfd_02_faulted_alm"}, "valueType": "BOOLEAN", "value": null, "predicate": {"type": "BOOLEAN", "operation": "EQUAL", "value": {"defaultValue": true, "userValue": null, "dynamicValue": null}}}],
                        "spec": {"type": "SIMPLE"}
                    },
                    "schedule": null, "alarmDetails": "", "dashboardId": null
                }},
                "clearRule": {
                    "condition": {
                        "condition": [{"key": {"type": "TIME_SERIES", "key": "vfd_02_faulted_alm"}, "valueType": "BOOLEAN", "value": null, "predicate": {"type": "BOOLEAN", "operation": "EQUAL", "value": {"defaultValue": false, "userValue": null, "dynamicValue": null}}}],
                        "spec": {"type": "DURATION", "unit": "MINUTES", "predicate": {"defaultValue": 30, "userValue": null, "dynamicValue": null}}
                    },
                    "schedule": null, "alarmDetails": null, "dashboardId": null
                },
                "propagate": false,
                "propagateToOwner": false,
                "propagateToOwnerHierarchy": false,
                "propagateToTenant": false,
                "propagateRelationTypes": null
            },
            {
                "id": "c6f5ceee-097b-4cdc-ab5b-f881700a6a07",
                "alarmType": "VFD 02 Status Alarm",
                "createRules": {"CRITICAL": {
                    "condition": {
                        "condition": [{"key": {"type": "TIME_SERIES", "key": "vfd_02_status_alm"}, "valueType": "BOOLEAN", "value": null, "predicate": {"type": "BOOLEAN", "operation": "EQUAL", "value": {"defaultValue": true, "userValue": null, "dynamicValue": null}}}],
                        "spec": {"type": "SIMPLE"}
                    },
                    "schedule": null, "alarmDetails": "", "dashboardId": null
                }},
                "clearRule": {
                    "condition": {
                        "condition": [{"key": {"type": "TIME_SERIES", "key": "vfd_02_status_alm"}, "valueType": "BOOLEAN", "value": null, "predicate": {"type": "BOOLEAN", "operation": "EQUAL", "value": {"defaultValue": false, "userValue": null, "dynamicValue": null}}}],
                        "spec": {"type": "DURATION", "unit": "MINUTES", "predicate": {"defaultValue": 30, "userValue": null, "dynamicValue": null}}
                    },
                    "schedule": null, "alarmDetails": null, "dashboardId": null
                },
                "propagate": false,
                "propagateToOwner": false,
                "propagateToOwnerHierarchy": false,
                "propagateToTenant": false,
                "propagateRelationTypes": null
            },
            {
                "id": "0ad5d0ab-4f0a-48b3-9d0b-57919a475d84",
                "alarmType": "VFD 02 Undervoltage Alarm",
                "createRules": {"CRITICAL": {
                    "condition": {
                        "condition": [{"key": {"type": "TIME_SERIES", "key": "vfd_02_undervoltage_alm"}, "valueType": "BOOLEAN", "value": null, "predicate": {"type": "BOOLEAN", "operation": "EQUAL", "value": {"defaultValue": true, "userValue": null, "dynamicValue": null}}}],
                        "spec": {"type": "SIMPLE"}
                    },
                    "schedule": null, "alarmDetails": "", "dashboardId": null
                }},
                "clearRule": {
                    "condition": {
                        "condition": [{"key": {"type": "TIME_SERIES", "key": "vfd_02_undervoltage_alm"}, "valueType": "BOOLEAN", "value": null, "predicate": {"type": "BOOLEAN", "operation": "EQUAL", "value": {"defaultValue": false, "userValue": null, "dynamicValue": null}}}],
                        "spec": {"type": "DURATION", "unit": "MINUTES", "predicate": {"defaultValue": 30, "userValue": null, "dynamicValue": null}}
                    },
                    "schedule": null, "alarmDetails": null, "dashboardId": null
                },
                "propagate": false,
                "propagateToOwner": false,
                "propagateToOwnerHierarchy": false,
                "propagateToTenant": false,
                "propagateRelationTypes": null
            }
        ]
    },
    "provisionDeviceKey": null,
    "firmwareId": null,
    "softwareId": null,
    "defaultEdgeRuleChainId": null,
    "default": false
}
4458 Code Snippets/ek_facility.json Normal file
File diff suppressed because it is too large
307 Code Snippets/inputcoords.csv Normal file
@@ -0,0 +1,307 @@
Name;latitude tel;longitude tel;latitude att;longitude att
"12"" Flow Meter";31.939630;-104.030778;;
1701 1;;;32.008171;-102.337949
1701 10;;;;
1701 11;;;32.007424;-102.347182
1701 12;;;;
1701 2;;;;
1701 9;;;32.007593;-102.346486
1701 Gateway;;;;
1701 Pond;;;32.007794;-102.339572
AA-101;;;32.014750;-102.220396
AA Pond;;;32.012669;-102.223246
AA Transfer;;;;
AdvVFDIPP #3;;;;
AL-101;;;32.287894;-102.353608
AL-901;;;32.274323;-102.368214
AS-11;;;32.074713;-102.180584
AT-101;;;32.055767;-102.191373
AU-3401;;;32.051447;-102.369505
Aurora 10;;;31.329965;-102.042210
Aurora 11;;;31.328801;-102.048474
Aurora 6;;;31.333529;-102.024099
Aurora 7;;;31.333874;-102.024464
Aurora 8;;;31.332901;-102.026872
Aurora 9;;;31.332249;-102.030431
AV-701;;;32.024790;-102.240240
AW-3401;;;32.091929;-102.241150
AW-901;;;32.069396;-102.245434
AW Battery;;;32.090579;-102.244011
Banay WW 18 #8;;;31.675614;-102.243897
Banay WW 7-2;;;31.677993;-102.252632
Barnett 19-2 WW;;;31.350333;-102.061960
Barnett 24-1;;;31.329354;-102.070197
Baylee Remote Booster;;;31.558139;-102.172528
BE-51;;;;
BE-601;;;32.056301;-102.277344
BE-71;;;32.038507;-102.263096
BE Pond;;;;
BI-31;;;32.034010;-102.323120
BJ-101;;;32.067810;-102.298672
BJ-701;;;32.073971;-102.272705
BK-1701;;;32.055834;-102.316510
BK-801;;;32.075204;-102.335508
BL-3201;;;32.050612;-102.341836
BL-3401;;;32.068716;-102.358516
BM-1501;;;32.134943;-102.267789
BN-2202;;;32.111258;-102.254015
BN-301;;;32.118530;-102.270160
BN Pond;;;32.112270;-102.257490
BP-201;;;32.131795;-102.294616
BP-601;;;32.128150;-102.308500
BP Battery;32.119443;-102.301507;32.119443;-102.301507
BP Compressor;;;;
BP Inlet;32.131920;-102.287505;;
BP Pond;;;32.132034;-102.288982
BQ-301;;;32.093284;-102.305847
BQ-41;;;32.114358;-102.299758
BV-101;;;32.178548;-102.281074
BV-4601;;;32.198494;-102.273250
BV-601;;;32.157367;-102.274653
BV-602;;;32.155210;-102.258540
BV Battery;31.965177;-102.123915;31.965177;-102.123915
BW-041;;;32.151020;-102.298150
BW-72;;;32.150861;-102.315472
BX-101;;;32.173490;-102.317370
BX-1601;;;32.168472;-102.334750
BX-901;;;32.185633;-102.313754
BX-902;;;32.176339;-102.300210
BY-501;;;32.209417;-102.304694
BZ-201;;;32.184045;-102.329931
BZ-202;;;32.197854;-102.324920
Caden WW #1;;;31.424971;-102.011564
Caden WW #2;;;31.426582;-102.004652
Caden WW #3;;;31.428291;-101.997159
Caden WW #4;;;31.429292;-101.992556
Caden WW #6;;;31.428559;-101.991219
Camera Trailer 100;31.142255;-103.147965;;
Camera Trailer 101;31.965302;-102.124730;;
Camera Trailer 102;32.313893;-102.367877;;
Camera Trailer 103;31.966095;-102.125094;;
Camera Trailer 104;31.966085;-102.125123;;
Camera Trailer 105;31.965380;-102.124730;;
Camera Trailer 106;31.966133;-102.125096;;
Camera Trailer 107;32.102407;-103.823707;;
Camera Trailer 108;31.968196;-102.126852;;
Camera Trailer 109;31.623276;-103.631850;;
Camera Trailer 110;32.153607;-102.272630;;
Camera Trailer 111;31.965916;-102.125094;;
Camera Trailer 112;31.965927;-102.125056;;
Camera Trailer 113;31.949004;-102.160070;;
Camera Trailer 114;31.965885;-102.125070;;
Camera Trailer 115;31.965828;-102.125046;;
Camera Trailer 116;32.109723;-103.541167;;
Camera Trailer 117;32.153028;-103.588695;;
Camera Trailer 118;32.066612;-103.782402;;
Camera Trailer 119;31.966031;-102.125101;;
Camera Trailer 120;32.304702;-101.872473;;
Camera Trailer 121;32.112238;-102.527553;;
Camera Trailer 122;32.208428;-102.360184;;
Camera Trailer 123;31.965646;-102.124758;;
Camera Trailer 125;31.965985;-102.125090;;
Camera Trailer 126;31.965331;-102.124706;;
Camera Trailer 127;31.966071;-102.125113;;
Camera Trailer 128;32.077878;-103.909418;;
Camera Trailer 129;31.965685;-102.124829;;
Camera Trailer 130;31.965685;-102.124842;;
Camera Trailer 200;32.207930;-103.876821;;
Camera Trailer 201;32.313567;-101.831447;;
Camera Trailer 202;32.313282;-101.823884;;
Camera Trailer 203;32.210359;-103.871380;;
Camera Trailer 204;32.015870;-101.826887;;
Camera Trailer 205;30.228071;-95.526871;;
Camera Trailer 206;32.178407;-102.280877;;
Camera Trailer 300;31.965136;-102.124625;;
Camera Trailer 301;31.965156;-102.124603;;
Camera Trailer 302;32.207931;-103.876112;;
Camera Trailer 303;31.965226;-102.124314;;
Camera Trailer 304;31.965134;-102.124679;;
Camera Trailer 305;32.210167;-103.872161;;
Camera Trailer 306;32.102767;-102.225360;;
Camera Trailer 307;32.304978;-101.873035;;
Camera Trailer 308;32.280076;-102.299571;;
Camera Trailer 309;32.150764;-102.272134;;
Camera Trailer 310;32.023837;-102.029614;;
Camera Trailer 311;32.312862;-101.831165;;
Camera Trailer 312;32.482383;-101.859808;;
Camera Trailer 313;32.471947;-101.935862;;
CF-1501;;;32.254407;-102.289776
CF-1601;;;32.253675;-102.321603
CF-2101;;;32.238472;-102.284806
CF-501;;;32.288194;-102.332453
CF-701;;;32.271506;-102.327233
CF-801;;;32.266433;-102.305903
CF Inlet;32.265871;-102.342135;;
CF Pond;;;32.265197;-102.341933
CG-1001;;;32.257922;-102.362356
CG-1101;;;32.240281;-102.357294
CG-601;;;32.253131;-102.342499
CG-701;;;32.269994;-102.347869
Chuda Flow Meter #1;;;32.487164;-101.870672
Chuda Flow Meter #2;;;32.486408;-101.865952
Chuda Flow Meter #3;;;32.484286;-101.867593
Chuda Flow Meter #4;;;32.487430;-101.862969
CI-1301;;;32.208100;-102.361340
CI-1302;;;32.222400;-102.350200
CI-401;;;32.216214;-102.330655
CI-501;;;32.236507;-102.337326
CJ-1801;;;32.243756;-102.305267
CJ-2201;;;32.226575;-102.313603
CrossBarRanch #3116 WS;;;32.105892;-102.195275
Davis Check Meter;;;30.385079;-100.398005
Dawn #2;;;31.427301;-102.119062
Dawn #3;;;31.423635;-102.121751
Dawn to Terri;;;31.426996;-102.118992
EKKO 1;;;;
FB-501;;;32.035592;-102.204997
Fee BM Battery;;;32.141825;-102.253097
Florence WW #1;;;31.389539;-101.977307
Florence WW #2;;;31.389809;-101.974169
Flow Meter 11;31.965532;-102.124231;;
Flow Meter 12;31.965514;-102.124253;;
Flow Meter 6;31.939516;-104.030772;;
Foundation Check Meter;;;30.401581;-100.807881
Francis Hill Check Meter;;;30.267756;-100.541178
Glasscock Check Meter;;;30.601340;-100.993300
Great Western Check Meter;;;30.429062;-100.802917
Headlee 3401 WS;31.969551;-102.297012;31.969551;-102.297012
"HP 10"" 60K #1";31.618667;-102.120201;;
"HP 12"" 40K #1 US";31.618745;-102.119999;;
"HP 12"" 40K #2";31.618721;-102.119852;;
"HP 12"" 40K #3";31.618723;-102.119812;;
"HP 12"" 60K #2";31.620732;-102.121136;;
"HP 12"" 60K #3";31.620829;-102.121155;;
"HP 8"" 40K #4 R";31.620769;-102.121099;;
HP Test Location;;;32.000000;-102.000000
HP Test Location 2;;;31.965225;-102.124103
Jessica WW #1;;;31.374467;-101.963489
Jessica WW #2;;;31.368209;-101.961262
Jessica WW #3;;;31.368043;-101.960620
Jessica WW #4;;;31.384569;-101.956347
Jessica WW #5;;;31.381946;-101.955482
Jessica WW #7;;;31.377569;-101.953845
Kate A1;;;31.346283;-102.086868
Kate A2;;;31.341675;-102.084076
Kate B1;;;31.335507;-102.081482
KD #2;;;31.474076;-102.197786
KD #4;;;31.479206;-102.201117
KD #5;;;31.474872;-102.209439
KD #7;;;31.479147;-102.216474
Kelsey Pit;;;31.516559;-102.166829
Laney A #2;;;31.356207;-102.052619
Laurie Gwen Transfer;;;31.425452;-101.987885
Laurie Gwen WW #1;;;31.414602;-101.998772
Lisa Water Transfer;;;31.420228;-101.968562
Lisa WW #1;;;31.419889;-101.975526
Lively Check Meter;;;30.401581;-100.807881
LUDEMAN #1;31.921785;-103.424975;;
"Mabee 16""";31.620073;-102.120593;;
Madeline WW #2;;;31.394942;-101.992428
Madelyn Kate #3 WW;;;31.390507;-101.986781
Mann Check Meter;;;30.228770;-100.379522
Mary 43 #1;;;31.481471;-102.199637
Mary 43 #2;;;31.457938;-102.197870
Mary 43 #3;;;31.477186;-102.186897
Mary 43 #5;;;31.474287;-102.187780
Monique #1;;;31.430164;-102.126619
Monique #2;;;31.426953;-102.126039
Monique #3;;;31.429928;-102.125681
Nancy Pit;;;31.513194;-102.230482
Office Water Management;;;;
P2P 2326;31.809850;-102.144562;;
P2P #3;31.620072;-102.120629;;
P2P CROSS L;31.617627;-102.148728;;
P2P HP ;31.618782;-102.119069;;
"Parks Inlet #1- 10""";31.816311;-102.138278;;
"Parks Inlet #2 - 10""";31.816290;-102.138280;;
"PARKS OUTLET - 10""";31.814745;-102.139394;;
Pond A;;;31.987505;-102.318024
Pond A Gateway;;;;
Power Plant Transfer;;;;
Rachel Gwen Transfer;;;31.393684;-101.961127
Ratliff Prod Well 27;;;31.987753;-102.344529
Ratliff Well 28;;;31.983872;-102.345029
Ratliff Well 29;;;31.983482;-102.344455
Ratliff Well 31;;;31.987715;-102.327278
Ratliff Well 36;;;;
Ratliff Well 38;;;;
Ratliff Well 42;;;31.964041;-102.332986
Ratliff Well 45;;;31.962430;-102.340150
Ratliff Well A 30;;;31.988380;-102.328844
Ratliff Well A 32;;;32.003628;-102.323035
Ratliff Well A 33;;;31.997734;-102.321101
Ratliff Well A 34;;;31.987116;-102.317512
Ratliff Well A 35;;;31.985142;-102.316897
Ratliff Well A 37;;;31.982706;-102.316073
Ratliff Well A 39;;;31.981049;-102.315328
Ratliff Well A 40;;;31.981164;-102.315928
Ratliff Well A 41;;;31.965060;-102.330468
Ratliff Well A 43;;;31.963629;-102.333210
Ratliff Well A 44;;;31.962098;-102.339928
Ratliff Well A 46;;;31.963278;-102.340511
Ratliff Well B 36;;;;
Rhonda Pit;;;31.567326;-102.285167
Rig Pump #03;31.809937;-101.762311;31.902849;-101.877387
Rig Pump #04;32.342715;-101.671300;32.320571;-101.695251
Rig Pump #06;32.015579;-101.826784;31.902959;-101.877407
Rig Pump #07;32.337589;-101.891645;31.867502;-101.819380
Rig Pump #08;32.008296;-101.840682;32.008296;-101.840682
Rig Pump #10;32.058536;-101.681551;32.210607;-101.630561
Rig Pump #11;31.848657;-101.766952;31.870703;-101.756014
Rig Pump #12;32.015573;-101.826779;31.872968;-101.752086
Rig Pump #13;32.457135;-101.783249;32.337629;-101.891645
S-601;;;32.219803;-102.280277
"Single 12""";31.860729;-102.303110;;
Sonya;;;31.750722;-102.046389
Stephanie 41 #2;;;31.507569;-102.173839
Stephanie 41 #3;;;31.505766;-102.181310
Terri #1;;;31.476535;-102.178382
Terri #2;;;31.481768;-102.180497
Terri #3;;;31.489832;-102.184831
Terri #4;;;31.490261;-102.183292
Terri #6;;;31.492807;-102.168766
Terri Pond;;;31.482556;-102.166806
Tessa Lyn;;;31.349740;-102.076933
TM1;;;31.498894;-102.205459
TM2;;;31.493274;-102.203603
TPRW Line;31.621735;-102.120537;;
Tree 13;;;32.001432;-102.360229
Tree 14;;;;
Tree 15;;;32.000017;-102.359792
Tree 16;;;;
Tree 17;;;;
Tree 18;;;;
Tree 19;;;;
Tree 20;;;;
Tree 21;;;31.996249;-102.358587
Tree 22;;;31.995313;-102.358832
Tree 23;;;31.994288;-102.357783
Tree 25;;;;
Tree 26;;;31.996235;-102.356192
Tree Gateway;;;;
Tree Pond;;;31.994993;-102.359162
"Triple 4""";31.860729;-102.303110;;
Trumann 1;;;31.335545;-102.006756
Trumann 2;;;31.335545;-102.006756
Trumann 3;;;31.341717;-102.007934
Trumann 4;;;31.343077;-102.010384
Trumann 5;;;31.348845;-102.008811
Valve Controller;;;32.178726;-102.280974
WC41-1;;;31.481766;-102.219535
WC41-2;;;31.480950;-102.223040
WC41-3;;;31.479859;-102.227972
Wess Hill Check Meter;;;30.496197;-100.877146
Windham 107-1;;;31.381876;-102.122542
Windham 107-2;;;31.380609;-102.122112
Windham 108-1;;;31.390889;-102.120610
Windham 108-10;;;31.389643;-102.122629
Windham 108-2;;;31.390139;-102.119854
Windham 108-3;;;31.390587;-102.126189
Windham 108-5;;;31.390815;-102.127345
Windham 108-6;;;31.391496;-102.129272
Windham 108-7;;;31.391321;-102.131313
Windham 108-8;;;31.386190;-102.127263
Windham 108-9;;;31.388959;-102.123217
Yvonne Transfer Pump 1;;;31.562778;-102.248611
Yvonne Transfer Pump 2;;;31.562778;-102.248611
Yvonne Transfer Pump 3;;;31.562778;-102.248611
18 Code Snippets/mqtt-js-rpc-from-server.js Normal file
@@ -0,0 +1,18 @@
var mqtt = require('mqtt');
var client = mqtt.connect('mqtt://localhost', {
    username: process.env.TOKEN
});

client.on('connect', function () {
    console.log('connected');
    client.subscribe('v1/devices/me/rpc/request/+')
    client.subscribe('v1/devices/me/attributes')
});

client.on('message', function (topic, message) {
    console.log('request.topic: ' + topic);
    console.log('request.body: ' + message.toString());
    var requestId = topic.slice('v1/devices/me/rpc/request/'.length);
    //client acts as an echo service
    client.publish('v1/devices/me/rpc/response/' + requestId, message);
});
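The same server-side RPC echo can be written against the ThingsBoard device MQTT API from Python. A minimal sketch using paho-mqtt 1.x (an assumption; the commit itself only ships the Node client), reading the device access token from the TOKEN environment variable as the JS version does:

    import os
    import paho.mqtt.client as mqtt  # paho-mqtt 1.x callback API assumed

    def on_connect(client, userdata, flags, rc):
        client.subscribe("v1/devices/me/rpc/request/+")

    def on_message(client, userdata, msg):
        # echo each server-side RPC request back on the matching response topic
        request_id = msg.topic.split("/")[-1]
        client.publish("v1/devices/me/rpc/response/" + request_id, msg.payload)

    client = mqtt.Client()
    client.username_pw_set(os.environ["TOKEN"])
    client.on_connect = on_connect
    client.on_message = on_message
    client.connect("localhost", 1883)
    client.loop_forever()
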
457 Code Snippets/package-lock.json generated Normal file
@@ -0,0 +1,457 @@
{
  "name": "Code Snippets",
  "lockfileVersion": 3,
  "requires": true,
  "packages": {
    "": {
      "dependencies": {"mqtt": "^5.7.3"}
    },
    "node_modules/@babel/runtime": {
      "version": "7.24.7",
      "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.24.7.tgz",
      "integrity": "sha512-UwgBRMjJP+xv857DCngvqXI3Iq6J4v0wXmwc6sapg+zyhbwmQX67LUEFrkK5tbyJ30jGuG3ZvWpBiB9LCy1kWw==",
      "dependencies": {"regenerator-runtime": "^0.14.0"},
      "engines": {"node": ">=6.9.0"}
    },
    "node_modules/@types/node": {
      "version": "20.14.9",
      "resolved": "https://registry.npmjs.org/@types/node/-/node-20.14.9.tgz",
      "integrity": "sha512-06OCtnTXtWOZBJlRApleWndH4JsRVs1pDCc8dLSQp+7PpUpX3ePdHyeNSFTeSe7FtKyQkrlPvHwJOW3SLd8Oyg==",
      "dependencies": {"undici-types": "~5.26.4"}
    },
    "node_modules/@types/readable-stream": {
      "version": "4.0.14",
      "resolved": "https://registry.npmjs.org/@types/readable-stream/-/readable-stream-4.0.14.tgz",
      "integrity": "sha512-xZn/AuUbCMShGsqH/ehZtGDwQtbx00M9rZ2ENLe4tOjFZ/JFeWMhEZkk2fEe1jAUqqEAURIkFJ7Az/go8mM1/w==",
      "dependencies": {"@types/node": "*", "safe-buffer": "~5.1.1"}
    },
    "node_modules/@types/ws": {
      "version": "8.5.10",
      "resolved": "https://registry.npmjs.org/@types/ws/-/ws-8.5.10.tgz",
      "integrity": "sha512-vmQSUcfalpIq0R9q7uTo2lXs6eGIpt9wtnLdMv9LVpIjCA/+ufZRozlVoVelIYixx1ugCBKDhn89vnsEGOCx9A==",
      "dependencies": {"@types/node": "*"}
    },
    "node_modules/abort-controller": {
      "version": "3.0.0",
      "resolved": "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz",
      "integrity": "sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==",
      "dependencies": {"event-target-shim": "^5.0.0"},
      "engines": {"node": ">=6.5"}
    },
    "node_modules/base64-js": {
      "version": "1.5.1",
      "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz",
      "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==",
      "funding": [{"type": "github", "url": "https://github.com/sponsors/feross"}, {"type": "patreon", "url": "https://www.patreon.com/feross"}, {"type": "consulting", "url": "https://feross.org/support"}]
    },
    "node_modules/bl": {
      "version": "6.0.13",
      "resolved": "https://registry.npmjs.org/bl/-/bl-6.0.13.tgz",
      "integrity": "sha512-tMncAcpsyjZgAVbVFupVIaB2xud13xxT59fdHkuszY2jdZkqIWfpQdmII1fOe3kOGAz0mNLTIHEm+KxpYsQKKg==",
      "dependencies": {"@types/readable-stream": "^4.0.0", "buffer": "^6.0.3", "inherits": "^2.0.4", "readable-stream": "^4.2.0"}
    },
    "node_modules/buffer": {
      "version": "6.0.3",
      "resolved": "https://registry.npmjs.org/buffer/-/buffer-6.0.3.tgz",
      "integrity": "sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==",
      "funding": [{"type": "github", "url": "https://github.com/sponsors/feross"}, {"type": "patreon", "url": "https://www.patreon.com/feross"}, {"type": "consulting", "url": "https://feross.org/support"}],
      "dependencies": {"base64-js": "^1.3.1", "ieee754": "^1.2.1"}
    },
    "node_modules/buffer-from": {
      "version": "1.1.2",
      "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz",
      "integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ=="
    },
    "node_modules/commist": {
      "version": "3.2.0",
      "resolved": "https://registry.npmjs.org/commist/-/commist-3.2.0.tgz",
      "integrity": "sha512-4PIMoPniho+LqXmpS5d3NuGYncG6XWlkBSVGiWycL22dd42OYdUGil2CWuzklaJoNxyxUSpO4MKIBU94viWNAw=="
    },
    "node_modules/concat-stream": {
      "version": "2.0.0",
      "resolved": "https://registry.npmjs.org/concat-stream/-/concat-stream-2.0.0.tgz",
      "integrity": "sha512-MWufYdFw53ccGjCA+Ol7XJYpAlW6/prSMzuPOTRnJGcGzuhLn4Scrz7qf6o8bROZ514ltazcIFJZevcfbo0x7A==",
      "engines": ["node >= 6.0"],
      "dependencies": {"buffer-from": "^1.0.0", "inherits": "^2.0.3", "readable-stream": "^3.0.2", "typedarray": "^0.0.6"}
    },
    "node_modules/concat-stream/node_modules/readable-stream": {
      "version": "3.6.2",
      "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz",
      "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==",
      "dependencies": {"inherits": "^2.0.3", "string_decoder": "^1.1.1", "util-deprecate": "^1.0.1"},
      "engines": {"node": ">= 6"}
    },
    "node_modules/debug": {
      "version": "4.3.5",
      "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.5.tgz",
      "integrity": "sha512-pt0bNEmneDIvdL1Xsd9oDQ/wrQRkXDT4AUWlNZNPKvW5x/jyO9VFXkJUP07vQ2upmw5PlaITaPKc31jK13V+jg==",
      "dependencies": {"ms": "2.1.2"},
      "engines": {"node": ">=6.0"},
      "peerDependenciesMeta": {"supports-color": {"optional": true}}
    },
    "node_modules/event-target-shim": {
      "version": "5.0.1",
      "resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz",
      "integrity": "sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==",
      "engines": {"node": ">=6"}
    },
    "node_modules/events": {
      "version": "3.3.0",
      "resolved": "https://registry.npmjs.org/events/-/events-3.3.0.tgz",
      "integrity": "sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==",
      "engines": {"node": ">=0.8.x"}
    },
    "node_modules/fast-unique-numbers": {
      "version": "8.0.13",
      "resolved": "https://registry.npmjs.org/fast-unique-numbers/-/fast-unique-numbers-8.0.13.tgz",
      "integrity": "sha512-7OnTFAVPefgw2eBJ1xj2PGGR9FwYzSUso9decayHgCDX4sJkHLdcsYTytTg+tYv+wKF3U8gJuSBz2jJpQV4u/g==",
      "dependencies": {"@babel/runtime": "^7.23.8", "tslib": "^2.6.2"},
      "engines": {"node": ">=16.1.0"}
    },
    "node_modules/help-me": {
      "version": "5.0.0",
      "resolved": "https://registry.npmjs.org/help-me/-/help-me-5.0.0.tgz",
      "integrity": "sha512-7xgomUX6ADmcYzFik0HzAxh/73YlKR9bmFzf51CZwR+b6YtzU2m0u49hQCqV6SvlqIqsaxovfwdvbnsw3b/zpg=="
    },
    "node_modules/ieee754": {
      "version": "1.2.1",
      "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz",
      "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==",
      "funding": [{"type": "github", "url": "https://github.com/sponsors/feross"}, {"type": "patreon", "url": "https://www.patreon.com/feross"}, {"type": "consulting", "url": "https://feross.org/support"}]
    },
    "node_modules/inherits": {
      "version": "2.0.4",
      "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz",
      "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ=="
    },
    "node_modules/js-sdsl": {
      "version": "4.3.0",
      "resolved": "https://registry.npmjs.org/js-sdsl/-/js-sdsl-4.3.0.tgz",
      "integrity": "sha512-mifzlm2+5nZ+lEcLJMoBK0/IH/bDg8XnJfd/Wq6IP+xoCjLZsTOnV2QpxlVbX9bMnkl5PdEjNtBJ9Cj1NjifhQ==",
      "funding": {"type": "opencollective", "url": "https://opencollective.com/js-sdsl"}
    },
    "node_modules/lru-cache": {
      "version": "10.3.0",
      "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.3.0.tgz",
      "integrity": "sha512-CQl19J/g+Hbjbv4Y3mFNNXFEL/5t/KCg8POCuUqd4rMKjGG+j1ybER83hxV58zL+dFI1PTkt3GNFSHRt+d8qEQ==",
      "engines": {"node": "14 || >=16.14"}
    },
    "node_modules/minimist": {
      "version": "1.2.8",
      "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz",
      "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==",
      "funding": {"url": "https://github.com/sponsors/ljharb"}
    },
    "node_modules/mqtt": {
      "version": "5.7.3",
      "resolved": "https://registry.npmjs.org/mqtt/-/mqtt-5.7.3.tgz",
      "integrity": "sha512-v+5la6Q6zjl0AWsI7ICDA/K3hclkNj7CMa0khMugCC+LKPLrQF+sSQb/9ckezZLMvcBC1tXhRzqmcagQoDl9fQ==",
      "dependencies": {"@types/readable-stream": "^4.0.5", "@types/ws": "^8.5.9", "commist": "^3.2.0", "concat-stream": "^2.0.0", "debug": "^4.3.4", "help-me": "^5.0.0", "lru-cache": "^10.0.1", "minimist": "^1.2.8", "mqtt": "^5.2.0", "mqtt-packet": "^9.0.0", "number-allocator": "^1.0.14", "readable-stream": "^4.4.2", "reinterval": "^1.1.0", "rfdc": "^1.3.0", "split2": "^4.2.0", "worker-timers": "^7.1.4", "ws": "^8.17.1"},
      "bin": {"mqtt": "build/bin/mqtt.js", "mqtt_pub": "build/bin/pub.js", "mqtt_sub": "build/bin/sub.js"},
      "engines": {"node": ">=16.0.0"}
    },
    "node_modules/mqtt-packet": {
      "version": "9.0.0",
      "resolved": "https://registry.npmjs.org/mqtt-packet/-/mqtt-packet-9.0.0.tgz",
      "integrity": "sha512-8v+HkX+fwbodsWAZIZTI074XIoxVBOmPeggQuDFCGg1SqNcC+uoRMWu7J6QlJPqIUIJXmjNYYHxBBLr1Y/Df4w==",
      "dependencies": {"bl": "^6.0.8", "debug": "^4.3.4", "process-nextick-args": "^2.0.1"}
    },
    "node_modules/ms": {
      "version": "2.1.2",
      "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
      "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w=="
    },
    "node_modules/number-allocator": {
      "version": "1.0.14",
      "resolved": "https://registry.npmjs.org/number-allocator/-/number-allocator-1.0.14.tgz",
      "integrity": "sha512-OrL44UTVAvkKdOdRQZIJpLkAdjXGTRda052sN4sO77bKEzYYqWKMBjQvrJFzqygI99gL6Z4u2xctPW1tB8ErvA==",
      "dependencies": {"debug": "^4.3.1", "js-sdsl": "4.3.0"}
    },
    "node_modules/process": {
      "version": "0.11.10",
      "resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz",
      "integrity": "sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A==",
      "engines": {"node": ">= 0.6.0"}
    },
    "node_modules/process-nextick-args": {
      "version": "2.0.1",
      "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz",
      "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag=="
    },
    "node_modules/readable-stream": {
      "version": "4.5.2",
      "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-4.5.2.tgz",
      "integrity": "sha512-yjavECdqeZ3GLXNgRXgeQEdz9fvDDkNKyHnbHRFtOr7/LcfgBcmct7t/ET+HaCTqfh06OzoAxrkN/IfjJBVe+g==",
      "dependencies": {"abort-controller": "^3.0.0", "buffer": "^6.0.3", "events": "^3.3.0", "process": "^0.11.10", "string_decoder": "^1.3.0"},
      "engines": {"node": "^12.22.0 || ^14.17.0 || >=16.0.0"}
    },
    "node_modules/regenerator-runtime": {
      "version": "0.14.1",
      "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.14.1.tgz",
      "integrity": "sha512-dYnhHh0nJoMfnkZs6GmmhFknAGRrLznOu5nc9ML+EJxGvrx6H7teuevqVqCuPcPK//3eDrrjQhehXVx9cnkGdw=="
    },
    "node_modules/reinterval": {
      "version": "1.1.0",
      "resolved": "https://registry.npmjs.org/reinterval/-/reinterval-1.1.0.tgz",
      "integrity": "sha512-QIRet3SYrGp0HUHO88jVskiG6seqUGC5iAG7AwI/BV4ypGcuqk9Du6YQBUOUqm9c8pw1eyLoIaONifRua1lsEQ=="
    },
    "node_modules/rfdc": {
      "version": "1.4.1",
      "resolved": "https://registry.npmjs.org/rfdc/-/rfdc-1.4.1.tgz",
      "integrity": "sha512-q1b3N5QkRUWUl7iyylaaj3kOpIT0N2i9MqIEQXP73GVsN9cw3fdx8X63cEmWhJGi2PPCF23Ijp7ktmd39rawIA=="
    },
    "node_modules/safe-buffer": {
      "version": "5.1.2",
      "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz",
      "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g=="
    },
    "node_modules/split2": {
      "version": "4.2.0",
      "resolved": "https://registry.npmjs.org/split2/-/split2-4.2.0.tgz",
      "integrity": "sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg==",
      "engines": {"node": ">= 10.x"}
    },
    "node_modules/string_decoder": {
      "version": "1.3.0",
      "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz",
      "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==",
      "dependencies": {"safe-buffer": "~5.2.0"}
    },
    "node_modules/string_decoder/node_modules/safe-buffer": {
      "version": "5.2.1",
      "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz",
      "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==",
      "funding": [{"type": "github", "url": "https://github.com/sponsors/feross"}, {"type": "patreon", "url": "https://www.patreon.com/feross"}, {"type": "consulting", "url": "https://feross.org/support"}]
    },
    "node_modules/tslib": {
      "version": "2.6.3",
      "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.3.tgz",
      "integrity": "sha512-xNvxJEOUiWPGhUuUdQgAJPKOOJfGnIyKySOc09XkKsgdUV/3E2zvwZYdejjmRgPCgcym1juLH3226yA7sEFJKQ=="
    },
    "node_modules/typedarray": {
      "version": "0.0.6",
      "resolved": "https://registry.npmjs.org/typedarray/-/typedarray-0.0.6.tgz",
      "integrity": "sha512-/aCDEGatGvZ2BIk+HmLf4ifCJFwvKFNb9/JeZPMulfgFracn9QFcAf5GO8B/mweUjSoblS5In0cWhqpfs/5PQA=="
    },
    "node_modules/undici-types": {
      "version": "5.26.5",
      "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz",
      "integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA=="
    },
    "node_modules/util-deprecate": {
      "version": "1.0.2",
      "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz",
      "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw=="
    },
    "node_modules/worker-timers": {
      "version": "7.1.8",
      "resolved": "https://registry.npmjs.org/worker-timers/-/worker-timers-7.1.8.tgz",
      "integrity": "sha512-R54psRKYVLuzff7c1OTFcq/4Hue5Vlz4bFtNEIarpSiCYhpifHU3aIQI29S84o1j87ePCYqbmEJPqwBTf+3sfw==",
      "dependencies": {"@babel/runtime": "^7.24.5", "tslib": "^2.6.2", "worker-timers-broker": "^6.1.8", "worker-timers-worker": "^7.0.71"}
    },
    "node_modules/worker-timers-broker": {
      "version": "6.1.8",
      "resolved": "https://registry.npmjs.org/worker-timers-broker/-/worker-timers-broker-6.1.8.tgz",
      "integrity": "sha512-FUCJu9jlK3A8WqLTKXM9E6kAmI/dR1vAJ8dHYLMisLNB/n3GuaFIjJ7pn16ZcD1zCOf7P6H62lWIEBi+yz/zQQ==",
      "dependencies": {"@babel/runtime": "^7.24.5", "fast-unique-numbers": "^8.0.13", "tslib": "^2.6.2", "worker-timers-worker": "^7.0.71"}
    },
    "node_modules/worker-timers-worker": {
      "version": "7.0.71",
      "resolved": "https://registry.npmjs.org/worker-timers-worker/-/worker-timers-worker-7.0.71.tgz",
      "integrity": "sha512-ks/5YKwZsto1c2vmljroppOKCivB/ma97g9y77MAAz2TBBjPPgpoOiS1qYQKIgvGTr2QYPT3XhJWIB6Rj2MVPQ==",
      "dependencies": {"@babel/runtime": "^7.24.5", "tslib": "^2.6.2"}
    },
    "node_modules/ws": {
      "version": "8.18.0",
      "resolved": "https://registry.npmjs.org/ws/-/ws-8.18.0.tgz",
      "integrity": "sha512-8VbfWfHLbbwu3+N6OKsOMpBdT4kXPDDB9cJk2bJ6mh9ucxdlnNvH1e+roYkKmN9Nxw2yjz7VzeO9oOz2zJ04Pw==",
      "engines": {"node": ">=10.0.0"},
      "peerDependencies": {"bufferutil": "^4.0.1", "utf-8-validate": ">=5.0.2"},
      "peerDependenciesMeta": {"bufferutil": {"optional": true}, "utf-8-validate": {"optional": true}}
    }
  }
}
5 Code Snippets/package.json Normal file
@@ -0,0 +1,5 @@
{
  "dependencies": {
    "mqtt": "^5.7.3"
  }
}
469 Report Generator/email-reports.ipynb Normal file
@@ -0,0 +1,469 @@
{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [],
   "source": [
    "from tb_rest_client.rest_client_pe import *\n",
    "from tb_rest_client.rest import ApiException\n",
    "\n",
    "import json, sys, os\n",
    "from datetime import datetime as dt"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [],
   "source": [
    "url = \"https://hp.henrypump.cloud\"\n",
    "username = \"nmelone@henry-pump.com\"\n",
    "password = \"gzU6$26v42mU%3jDzTJf\""
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [],
   "source": [
    "#Creating Rest Client for ThingsBoard\n",
    "with RestClientPE(base_url=url) as rest_client:\n",
    "    try:\n",
    "        rest_client.login(username=username, password=password)\n",
    "        #Loading Config from file\n",
    "        with open(\"./config.json\") as f:\n",
    "            config = json.load(f)\n",
    "\n",
    "        reportData = {}\n",
    "        reportToList = {}\n",
    "        #Loop through each item in config, each item represents a report\n",
    "        for report in config:\n",
    "            reportToList[report[\"name\"]] = report[\"emails\"]\n",
    "            #Each customer becomes its own xlsx file later\n",
    "            for customer in report[\"customers\"].keys():\n",
    "                #Get all the devices for a given customer\n",
    "                devices = rest_client.get_customer_devices(customer_id=customer, page=0, page_size=100)\n",
    "                #Filter devices to the desired devices\n",
    "                if report[\"filterDevicesIn\"]:\n",
    "                    devices.data = [device for device in devices.data if device.id.id in report[\"filterDevicesIn\"]]\n",
    "                if report[\"filterDevicesOut\"]:\n",
    "                    devices.data = [device for device in devices.data if device.id.id not in report[\"filterDevicesOut\"]]\n",
    "                #Create the report in reportData if needed\n",
    "                if not reportData.get(report[\"name\"], None):\n",
    "                    reportData[report[\"name\"]] = {}\n",
    "                #Go through all the devices and add them and their desired data to reportData\n",
    "                for device in devices.data:\n",
    "                    name = device.name\n",
    "                    keys = rest_client.get_timeseries_keys_v1(device.id)\n",
    "                    #Filter keys to desired keys\n",
    "                    for deviceType in report[\"customers\"][customer][\"deviceTypes\"]:\n",
    "                        if device.type == deviceType[\"deviceType\"]:\n",
    "                            keys = list(filter(lambda x: x in deviceType[\"dataPoints\"], keys))\n",
    "                    #Create customer if needed\n",
    "                    if not reportData[report[\"name\"]].get(report[\"customers\"][customer][\"name\"], None):\n",
    "                        reportData[report[\"name\"]][report[\"customers\"][customer][\"name\"]] = {}\n",
    "                    #Check to make sure the deviceType is desired in the report for the given device\n",
    "                    if device.type in list(map(lambda x: x[\"deviceType\"], report[\"customers\"][customer][\"deviceTypes\"])):\n",
    "                        #Create deviceType if needed\n",
    "                        if not reportData[report[\"name\"]][report[\"customers\"][customer][\"name\"]].get(device.type, None):\n",
    "                            reportData[report[\"name\"]][report[\"customers\"][customer][\"name\"]][device.type] = {}\n",
    "                        reportData[report[\"name\"]][report[\"customers\"][customer][\"name\"]][device.type][device.name] = rest_client.get_latest_timeseries(entity_id=device.id, keys=\",\".join(keys))\n",
    "    except ApiException as e:\n",
    "        print(e)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "{\n",
      "    \"CrownQuest-Daily-Report\": {\n",
      "        \"CrownQuest\": {\n",
      "            \"rigpump\": {\n",
      "                \"#07\": {\n",
      "                    \"vfd_frequency\": [\n",
      "                        {\n",
      "                            \"ts\": 1721761800000,\n",
      "                            \"value\": \"0.0\"\n",
      "                        }\n",
      "                    ],\n",
      "                    \"vfd_current\": [\n",
      "                        {\n",
      "                            \"ts\": 1721761800000,\n",
      "                            \"value\": \"0.0\"\n",
      "                        }\n",
      "                    ]\n",
      "                },\n",
      "                \"#03\": {\n",
      "                    \"vfd_frequency\": [\n",
      "                        {\n",
      "                            \"ts\": 1721712600000,\n",
      "                            \"value\": \"0.0\"\n",
      "                        }\n",
      "                    ],\n",
      "                    \"vfd_current\": [\n",
      "                        {\n",
      "                            \"ts\": 1721712600000,\n",
      "                            \"value\": \"0.0\"\n",
      "                        }\n",
      "                    ]\n",
      "                },\n",
      "                \"#04\": {\n",
      "                    \"vfd_frequency\": [\n",
      "                        {\n",
      "                            \"ts\": 1721772600000,\n",
      "                            \"value\": \"0.0\"\n",
      "                        }\n",
      "                    ],\n",
      "                    \"vfd_current\": [\n",
|
||||
" {\n",
|
||||
" \"ts\": 1721772600000,\n",
|
||||
" \"value\": \"0.0\"\n",
|
||||
" }\n",
|
||||
" ]\n",
|
||||
" },\n",
|
||||
" \"#08\": {\n",
|
||||
" \"vfd_frequency\": [\n",
|
||||
" {\n",
|
||||
" \"ts\": 1721772600000,\n",
|
||||
" \"value\": \"0.0\"\n",
|
||||
" }\n",
|
||||
" ],\n",
|
||||
" \"vfd_current\": [\n",
|
||||
" {\n",
|
||||
" \"ts\": 1721772600000,\n",
|
||||
" \"value\": \"0.0\"\n",
|
||||
" }\n",
|
||||
" ]\n",
|
||||
" },\n",
|
||||
" \"#12\": {\n",
|
||||
" \"vfd_frequency\": [\n",
|
||||
" {\n",
|
||||
" \"ts\": 1721772000000,\n",
|
||||
" \"value\": \"40.98\"\n",
|
||||
" }\n",
|
||||
" ],\n",
|
||||
" \"vfd_current\": [\n",
|
||||
" {\n",
|
||||
" \"ts\": 1721772000000,\n",
|
||||
" \"value\": \"43.03\"\n",
|
||||
" }\n",
|
||||
" ]\n",
|
||||
" },\n",
|
||||
" \"#11\": {\n",
|
||||
" \"vfd_frequency\": [\n",
|
||||
" {\n",
|
||||
" \"ts\": 1721772600000,\n",
|
||||
" \"value\": \"0.0\"\n",
|
||||
" }\n",
|
||||
" ],\n",
|
||||
" \"vfd_current\": [\n",
|
||||
" {\n",
|
||||
" \"ts\": 1721772600000,\n",
|
||||
" \"value\": \"0.0\"\n",
|
||||
" }\n",
|
||||
" ]\n",
|
||||
" },\n",
|
||||
" \"#06\": {\n",
|
||||
" \"vfd_frequency\": [\n",
|
||||
" {\n",
|
||||
" \"ts\": 1721772600000,\n",
|
||||
" \"value\": \"50.0\"\n",
|
||||
" }\n",
|
||||
" ],\n",
|
||||
" \"vfd_current\": [\n",
|
||||
" {\n",
|
||||
" \"ts\": 1721772600000,\n",
|
||||
" \"value\": \"34.51\"\n",
|
||||
" }\n",
|
||||
" ]\n",
|
||||
" },\n",
|
||||
" \"#10\": {\n",
|
||||
" \"vfd_frequency\": [\n",
|
||||
" {\n",
|
||||
" \"ts\": 1721772600000,\n",
|
||||
" \"value\": \"0.0\"\n",
|
||||
" }\n",
|
||||
" ],\n",
|
||||
" \"vfd_current\": [\n",
|
||||
" {\n",
|
||||
" \"ts\": 1721772600000,\n",
|
||||
" \"value\": \"0.0\"\n",
|
||||
" }\n",
|
||||
" ]\n",
|
||||
" },\n",
|
||||
" \"#13\": {\n",
|
||||
" \"vfd_frequency\": [\n",
|
||||
" {\n",
|
||||
" \"ts\": 1721772600000,\n",
|
||||
" \"value\": \"0.0\"\n",
|
||||
" }\n",
|
||||
" ],\n",
|
||||
" \"vfd_current\": [\n",
|
||||
" {\n",
|
||||
" \"ts\": 1721772600000,\n",
|
||||
" \"value\": \"0.0\"\n",
|
||||
" }\n",
|
||||
" ]\n",
|
||||
" }\n",
|
||||
" },\n",
|
||||
" \"cqwatertanks\": {\n",
|
||||
" \"Office Water Management\": {\n",
|
||||
" \"fm_1_flow_rate\": [\n",
|
||||
" {\n",
|
||||
" \"ts\": 1721772600000,\n",
|
||||
" \"value\": \"165.81\"\n",
|
||||
" }\n",
|
||||
" ]\n",
|
||||
" }\n",
|
||||
" }\n",
|
||||
" },\n",
|
||||
" \"Henry-Petroleum\": {\n",
|
||||
" \"abbflow\": {\n",
|
||||
" \"Davis Check Meter\": {\n",
|
||||
" \"accumulated_volume\": [\n",
|
||||
" {\n",
|
||||
" \"ts\": 1721772000000,\n",
|
||||
" \"value\": \"366655.72\"\n",
|
||||
" }\n",
|
||||
" ]\n",
|
||||
" },\n",
|
||||
" \"Great Western Check Meter\": {\n",
|
||||
" \"accumulated_volume\": [\n",
|
||||
" {\n",
|
||||
" \"ts\": 1721716800000,\n",
|
||||
" \"value\": \"45504.62\"\n",
|
||||
" }\n",
|
||||
" ]\n",
|
||||
" },\n",
|
||||
" \"Francis Hill Check Meter\": {\n",
|
||||
" \"accumulated_volume\": [\n",
|
||||
" {\n",
|
||||
" \"ts\": 1721772600000,\n",
|
||||
" \"value\": \"340350.0\"\n",
|
||||
" }\n",
|
||||
" ]\n",
|
||||
" },\n",
|
||||
" \"Foundation Check Meter\": {\n",
|
||||
" \"accumulated_volume\": [\n",
|
||||
" {\n",
|
||||
" \"ts\": 1721772600000,\n",
|
||||
" \"value\": \"434597.88\"\n",
|
||||
" }\n",
|
||||
" ]\n",
|
||||
" },\n",
|
||||
" \"Wess Hill Check Meter\": {\n",
|
||||
" \"accumulated_volume\": [\n",
|
||||
" {\n",
|
||||
" \"ts\": 1721772600000,\n",
|
||||
" \"value\": \"226273.38\"\n",
|
||||
" }\n",
|
||||
" ]\n",
|
||||
" },\n",
|
||||
" \"Lively Check Meter\": {\n",
|
||||
" \"accumulated_volume\": [\n",
|
||||
" {\n",
|
||||
" \"ts\": 1721772600000,\n",
|
||||
" \"value\": \"233637.39\"\n",
|
||||
" }\n",
|
||||
" ]\n",
|
||||
" },\n",
|
||||
" \"Glasscock Check Meter\": {\n",
|
||||
" \"accumulated_volume\": [\n",
|
||||
" {\n",
|
||||
" \"ts\": 1721772600000,\n",
|
||||
" \"value\": \"128745.13\"\n",
|
||||
" }\n",
|
||||
" ]\n",
|
||||
" },\n",
|
||||
" \"Mann Check Meter\": {\n",
|
||||
" \"accumulated_volume\": [\n",
|
||||
" {\n",
|
||||
" \"ts\": 1721772600000,\n",
|
||||
" \"value\": \"29861.7\"\n",
|
||||
" }\n",
|
||||
" ]\n",
|
||||
" }\n",
|
||||
" }\n",
|
||||
" }\n",
|
||||
" },\n",
|
||||
" \"Henry-Petroleum-Daily-Report\": {\n",
|
||||
" \"Henry-Petroleum\": {\n",
|
||||
" \"abbflow\": {\n",
|
||||
" \"Davis Check Meter\": {\n",
|
||||
" \"accumulated_volume\": [\n",
|
||||
" {\n",
|
||||
" \"ts\": 1721772000000,\n",
|
||||
" \"value\": \"366655.72\"\n",
|
||||
" }\n",
|
||||
" ]\n",
|
||||
" }\n",
|
||||
" }\n",
|
||||
" }\n",
|
||||
" }\n",
|
||||
"}\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"print(json.dumps(reportData, indent=4))"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 5,
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"{\n",
|
||||
" \"CrownQuest-Daily-Report\": [\n",
|
||||
" \"nmelone@henry-pump.com\",\n",
|
||||
" \"n_melone@hotmail.com\"\n",
|
||||
" ],\n",
|
||||
" \"Henry-Petroleum-Daily-Report\": [\n",
|
||||
" \"nmelone@henry-pump.com\",\n",
|
||||
" \"nmelone08@gmail.com\"\n",
|
||||
" ]\n",
|
||||
"}\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"print(json.dumps(reportToList, indent=4))"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 6,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import xlsxwriter\n",
|
||||
"import boto3\n",
|
||||
"from email.mime.multipart import MIMEMultipart\n",
|
||||
"from email.mime.application import MIMEApplication\n",
|
||||
"from email.mime.text import MIMEText\n",
|
||||
"import tempfile\n",
|
||||
"from email import encoders\n",
|
||||
"from email.mime.base import MIMEBase"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 7,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# Create an AWS SES client\n",
|
||||
"ses_client = boto3.client('ses', region_name='us-east-1')"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 8,
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"{'MessageId': '01000190e1a66884-79debb77-9a2f-400a-927e-ed2fe315a32c-000000', 'ResponseMetadata': {'RequestId': '212bf456-644c-492d-acf3-d23255222e4d', 'HTTPStatusCode': 200, 'HTTPHeaders': {'date': 'Tue, 23 Jul 2024 22:11:37 GMT', 'content-type': 'text/xml', 'content-length': '338', 'connection': 'keep-alive', 'x-amzn-requestid': '212bf456-644c-492d-acf3-d23255222e4d'}, 'RetryAttempts': 0}}\n",
|
||||
"[<xlsxwriter.workbook.Workbook object at 0x1100dae40>, <xlsxwriter.workbook.Workbook object at 0x105820980>]\n",
|
||||
"{'MessageId': '01000190e1a66958-70b625d5-dec6-45f1-a1d8-155264b5c6f0-000000', 'ResponseMetadata': {'RequestId': '9de5509b-9745-4379-93d1-535e055bf55e', 'HTTPStatusCode': 200, 'HTTPHeaders': {'date': 'Tue, 23 Jul 2024 22:11:38 GMT', 'content-type': 'text/xml', 'content-length': '338', 'connection': 'keep-alive', 'x-amzn-requestid': '9de5509b-9745-4379-93d1-535e055bf55e'}, 'RetryAttempts': 0}}\n",
|
||||
"[<xlsxwriter.workbook.Workbook object at 0x105f2afc0>]\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"# Create a workbook for each report\n",
|
||||
"for report_name, report_data in reportData.items():\n",
|
||||
" #will generate an email lower down\n",
|
||||
" spreadsheets = []\n",
|
||||
" # Create a worksheet for each company\n",
|
||||
" for company_name, company_data in report_data.items():\n",
|
||||
" workbook = xlsxwriter.Workbook(f\"{report_name}-{company_name}-{dt.today().strftime('%Y-%m-%d')}.xlsx\")\n",
|
||||
" bold = workbook.add_format({'bold': True})\n",
|
||||
" # Create a sheet for each device type\n",
|
||||
" for device_type, device_data in company_data.items():\n",
|
||||
" worksheet = workbook.add_worksheet(device_type)\n",
|
||||
" \n",
|
||||
" # Set the header row with device types\n",
|
||||
" device_types = list(device_data.keys())\n",
|
||||
" worksheet.write_column(1, 0, device_types,bold)\n",
|
||||
" \n",
|
||||
" # Write the data to the sheet\n",
|
||||
" for i, (telemetry_name, telemetry_data) in enumerate(device_data.items()):\n",
|
||||
" # Set the header row with telemetry names\n",
|
||||
" telemetry_names = list(telemetry_data.keys())\n",
|
||||
" worksheet.write_row(0, 1, telemetry_names, bold)\n",
|
||||
" for j, (data_name, data) in enumerate(telemetry_data.items()):\n",
|
||||
" values = [d[\"value\"] for d in data]\n",
|
||||
" worksheet.write_row(i + 1, j+ 1, values)\n",
|
||||
" worksheet.autofit()\n",
|
||||
" workbook.close()\n",
|
||||
" spreadsheets.append(workbook)\n",
|
||||
" # Create an email message\n",
|
||||
" msg = MIMEMultipart()\n",
|
||||
" msg['Subject'] = report_name\n",
|
||||
" msg['From'] = 'alerts@henry-pump.com'\n",
|
||||
" msg['To'] = \", \".join(reportToList[report_name])\n",
|
||||
"\n",
|
||||
" # Add a text body to the message (optional)\n",
|
||||
" body_text = 'Please find the attached spreadsheets.'\n",
|
||||
" msg.attach(MIMEText(body_text, 'plain'))\n",
|
||||
"\n",
|
||||
" # Attach each workbook in the spreadsheets array\n",
|
||||
" for spreadsheet in spreadsheets:\n",
|
||||
" # Attach the file to the email message\n",
|
||||
" attachment = MIMEBase('application', 'octet-stream')\n",
|
||||
" attachment.set_payload(open(spreadsheet.filename, \"rb\").read())\n",
|
||||
" encoders.encode_base64(attachment)\n",
|
||||
" attachment.add_header('Content-Disposition', 'attachment', filename=spreadsheet.filename)\n",
|
||||
"\n",
|
||||
" msg.attach(attachment)\n",
|
||||
"\n",
|
||||
" # Send the email using AWS SES\n",
|
||||
" response = ses_client.send_raw_email(\n",
|
||||
" \n",
|
||||
" RawMessage={'Data': msg.as_string()}\n",
|
||||
" )\n",
|
||||
"\n",
|
||||
" print(response)\n",
|
||||
" print(spreadsheets)"
|
||||
]
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "thingsboard",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.12.4"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 2
|
||||
}
|
||||
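Editor's note on the attachment cell above: the notebook imports `MIMEApplication` but never uses it, building each attachment from `MIMEBase` plus manual base64 encoding. A minimal equivalent sketch using `MIMEApplication`, which applies the transfer encoding itself (the filename here is illustrative):

```python
from email.mime.application import MIMEApplication

# Illustrative filename; in the notebook this would be spreadsheet.filename.
fname = "CrownQuest-Daily-Report-CrownQuest-2024-07-23.xlsx"
with open(fname, "rb") as f:
    part = MIMEApplication(f.read())  # base64 encoding is applied automatically
part.add_header("Content-Disposition", "attachment", filename=fname)
# msg.attach(part)  # msg is the MIMEMultipart built in the cell above
```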
244
Report Generator/lambda-python3.12/.gitignore
vendored
Normal file
@@ -0,0 +1,244 @@
|
||||
|
||||
# Created by https://www.gitignore.io/api/osx,linux,python,windows,pycharm,visualstudiocode
|
||||
|
||||
### Linux ###
|
||||
*~
|
||||
|
||||
# temporary files which can be created if a process still has a handle open of a deleted file
|
||||
.fuse_hidden*
|
||||
|
||||
# KDE directory preferences
|
||||
.directory
|
||||
|
||||
# Linux trash folder which might appear on any partition or disk
|
||||
.Trash-*
|
||||
|
||||
# .nfs files are created when an open file is removed but is still being accessed
|
||||
.nfs*
|
||||
|
||||
### OSX ###
|
||||
*.DS_Store
|
||||
.AppleDouble
|
||||
.LSOverride
|
||||
|
||||
# Icon must end with two \r
|
||||
Icon
|
||||
|
||||
# Thumbnails
|
||||
._*
|
||||
|
||||
# Files that might appear in the root of a volume
|
||||
.DocumentRevisions-V100
|
||||
.fseventsd
|
||||
.Spotlight-V100
|
||||
.TemporaryItems
|
||||
.Trashes
|
||||
.VolumeIcon.icns
|
||||
.com.apple.timemachine.donotpresent
|
||||
|
||||
# Directories potentially created on remote AFP share
|
||||
.AppleDB
|
||||
.AppleDesktop
|
||||
Network Trash Folder
|
||||
Temporary Items
|
||||
.apdisk
|
||||
|
||||
### PyCharm ###
|
||||
# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and Webstorm
|
||||
# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
|
||||
|
||||
# User-specific stuff:
|
||||
.idea/**/workspace.xml
|
||||
.idea/**/tasks.xml
|
||||
.idea/dictionaries
|
||||
|
||||
# Sensitive or high-churn files:
|
||||
.idea/**/dataSources/
|
||||
.idea/**/dataSources.ids
|
||||
.idea/**/dataSources.xml
|
||||
.idea/**/dataSources.local.xml
|
||||
.idea/**/sqlDataSources.xml
|
||||
.idea/**/dynamic.xml
|
||||
.idea/**/uiDesigner.xml
|
||||
|
||||
# Gradle:
|
||||
.idea/**/gradle.xml
|
||||
.idea/**/libraries
|
||||
|
||||
# CMake
|
||||
cmake-build-debug/
|
||||
|
||||
# Mongo Explorer plugin:
|
||||
.idea/**/mongoSettings.xml
|
||||
|
||||
## File-based project format:
|
||||
*.iws
|
||||
|
||||
## Plugin-specific files:
|
||||
|
||||
# IntelliJ
|
||||
/out/
|
||||
|
||||
# mpeltonen/sbt-idea plugin
|
||||
.idea_modules/
|
||||
|
||||
# JIRA plugin
|
||||
atlassian-ide-plugin.xml
|
||||
|
||||
# Cursive Clojure plugin
|
||||
.idea/replstate.xml
|
||||
|
||||
# Ruby plugin and RubyMine
|
||||
/.rakeTasks
|
||||
|
||||
# Crashlytics plugin (for Android Studio and IntelliJ)
|
||||
com_crashlytics_export_strings.xml
|
||||
crashlytics.properties
|
||||
crashlytics-build.properties
|
||||
fabric.properties
|
||||
|
||||
### PyCharm Patch ###
|
||||
# Comment Reason: https://github.com/joeblau/gitignore.io/issues/186#issuecomment-215987721
|
||||
|
||||
# *.iml
|
||||
# modules.xml
|
||||
# .idea/misc.xml
|
||||
# *.ipr
|
||||
|
||||
# Sonarlint plugin
|
||||
.idea/sonarlint
|
||||
|
||||
### Python ###
|
||||
# Byte-compiled / optimized / DLL files
|
||||
__pycache__/
|
||||
*.py[cod]
|
||||
*$py.class
|
||||
|
||||
# C extensions
|
||||
*.so
|
||||
|
||||
# Distribution / packaging
|
||||
.Python
|
||||
build/
|
||||
develop-eggs/
|
||||
dist/
|
||||
downloads/
|
||||
eggs/
|
||||
.eggs/
|
||||
lib/
|
||||
lib64/
|
||||
parts/
|
||||
sdist/
|
||||
var/
|
||||
wheels/
|
||||
*.egg-info/
|
||||
.installed.cfg
|
||||
*.egg
|
||||
|
||||
# PyInstaller
|
||||
# Usually these files are written by a python script from a template
|
||||
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
||||
*.manifest
|
||||
*.spec
|
||||
|
||||
# Installer logs
|
||||
pip-log.txt
|
||||
pip-delete-this-directory.txt
|
||||
|
||||
# Unit test / coverage reports
|
||||
htmlcov/
|
||||
.tox/
|
||||
.coverage
|
||||
.coverage.*
|
||||
.cache
|
||||
.pytest_cache/
|
||||
nosetests.xml
|
||||
coverage.xml
|
||||
*.cover
|
||||
.hypothesis/
|
||||
|
||||
# Translations
|
||||
*.mo
|
||||
*.pot
|
||||
|
||||
# Flask stuff:
|
||||
instance/
|
||||
.webassets-cache
|
||||
|
||||
# Scrapy stuff:
|
||||
.scrapy
|
||||
|
||||
# Sphinx documentation
|
||||
docs/_build/
|
||||
|
||||
# PyBuilder
|
||||
target/
|
||||
|
||||
# Jupyter Notebook
|
||||
.ipynb_checkpoints
|
||||
|
||||
# pyenv
|
||||
.python-version
|
||||
|
||||
# celery beat schedule file
|
||||
celerybeat-schedule.*
|
||||
|
||||
# SageMath parsed files
|
||||
*.sage.py
|
||||
|
||||
# Environments
|
||||
.env
|
||||
.venv
|
||||
env/
|
||||
venv/
|
||||
ENV/
|
||||
env.bak/
|
||||
venv.bak/
|
||||
|
||||
# Spyder project settings
|
||||
.spyderproject
|
||||
.spyproject
|
||||
|
||||
# Rope project settings
|
||||
.ropeproject
|
||||
|
||||
# mkdocs documentation
|
||||
/site
|
||||
|
||||
# mypy
|
||||
.mypy_cache/
|
||||
|
||||
### VisualStudioCode ###
|
||||
.vscode/*
|
||||
!.vscode/settings.json
|
||||
!.vscode/tasks.json
|
||||
!.vscode/launch.json
|
||||
!.vscode/extensions.json
|
||||
.history
|
||||
|
||||
### Windows ###
|
||||
# Windows thumbnail cache files
|
||||
Thumbs.db
|
||||
ehthumbs.db
|
||||
ehthumbs_vista.db
|
||||
|
||||
# Folder config file
|
||||
Desktop.ini
|
||||
|
||||
# Recycle Bin used on file shares
|
||||
$RECYCLE.BIN/
|
||||
|
||||
# Windows Installer files
|
||||
*.cab
|
||||
*.msi
|
||||
*.msm
|
||||
*.msp
|
||||
|
||||
# Windows shortcuts
|
||||
*.lnk
|
||||
|
||||
# Build folder
|
||||
|
||||
*/build/*
|
||||
|
||||
# End of https://www.gitignore.io/api/osx,linux,python,windows,pycharm,visualstudiocode
|
||||
36
Report Generator/lambda-python3.12/README.TOOLKIT.md
Normal file
@@ -0,0 +1,36 @@
# Developing AWS SAM Applications with the AWS Toolkit For Visual Studio Code

This project contains source code and supporting files for a serverless application that you can locally run, debug, and deploy to AWS with the AWS Toolkit For Visual Studio Code.

A "SAM" (Serverless Application Model) project is one that contains a template.yaml file understood by AWS tooling, such as the SAM CLI and the AWS Toolkit For Visual Studio Code.

## Writing and Debugging Serverless Applications

The code for this application will differ based on the runtime, but the path to a handler can be found in the [`template.yaml`](./template.yaml) file through a resource's `CodeUri` and `Handler` fields.

The AWS Toolkit For Visual Studio Code supports local debugging of serverless applications through VS Code's debugger. Since this application was created by the AWS Toolkit, launch configurations for all included handlers have been generated and can be found in the menu next to the Run button.

You can debug the Lambda handlers locally by adding a breakpoint to the source file, then running the launch configuration. This works by using Docker on your local machine.

Invocation parameters, including payloads and request parameters, can be edited either with the `Edit SAM Debug Configuration` command (through the Command Palette or CodeLens) or by editing the `launch.json` file.

AWS Lambda functions not defined in the [`template.yaml`](./template.yaml) file can be invoked and debugged by creating a launch configuration through the CodeLens over the function declaration, or with the `Add SAM Debug Configuration` command.

## Deploying Serverless Applications

You can deploy a serverless application by invoking the `AWS: Deploy SAM application` command through the Command Palette, or by right-clicking the Lambda node in the AWS Explorer, and then entering the deployment region, a valid S3 bucket in that region, and the name of a CloudFormation stack to deploy to. You can monitor your deployment's progress through the `AWS Toolkit` Output Channel.

## Interacting With Deployed Serverless Applications

A successfully deployed serverless application can be found in the AWS Explorer under the region and CloudFormation node that it was deployed to.

In the AWS Explorer, you can invoke _remote_ AWS Lambda Functions by right-clicking the Lambda node and selecting "Invoke on AWS".

Similarly, if the Function declaration contained an API Gateway event, the API can be found in the API Gateway node under the region node the application was deployed to, and can be invoked by right-clicking the API node and selecting "Invoke on AWS".

## Resources

General information about this SAM project can be found in the [`README.md`](./README.md) file in this folder.

More information about using the AWS Toolkit For Visual Studio Code with serverless applications can be found [in the AWS documentation](https://docs.aws.amazon.com/toolkit-for-vscode/latest/userguide/serverless-apps.html).
130
Report Generator/lambda-python3.12/README.md
Normal file
@@ -0,0 +1,130 @@
|
||||
# lambda-python3.12
|
||||
|
||||
This project contains source code and supporting files for a serverless application that you can deploy with the SAM CLI. It includes the following files and folders.
|
||||
|
||||
- hello_world - Code for the application's Lambda function.
|
||||
- events - Invocation events that you can use to invoke the function.
|
||||
- tests - Unit tests for the application code.
|
||||
- template.yaml - A template that defines the application's AWS resources.
|
||||
|
||||
The application uses several AWS resources, including Lambda functions and an API Gateway API. These resources are defined in the `template.yaml` file in this project. You can update the template to add AWS resources through the same deployment process that updates your application code.
|
||||
|
||||
If you prefer to use an integrated development environment (IDE) to build and test your application, you can use the AWS Toolkit.
|
||||
The AWS Toolkit is an open source plug-in for popular IDEs that uses the SAM CLI to build and deploy serverless applications on AWS. The AWS Toolkit also adds a simplified step-through debugging experience for Lambda function code. See the following links to get started.
|
||||
|
||||
* [CLion](https://docs.aws.amazon.com/toolkit-for-jetbrains/latest/userguide/welcome.html)
|
||||
* [GoLand](https://docs.aws.amazon.com/toolkit-for-jetbrains/latest/userguide/welcome.html)
|
||||
* [IntelliJ](https://docs.aws.amazon.com/toolkit-for-jetbrains/latest/userguide/welcome.html)
|
||||
* [WebStorm](https://docs.aws.amazon.com/toolkit-for-jetbrains/latest/userguide/welcome.html)
|
||||
* [Rider](https://docs.aws.amazon.com/toolkit-for-jetbrains/latest/userguide/welcome.html)
|
||||
* [PhpStorm](https://docs.aws.amazon.com/toolkit-for-jetbrains/latest/userguide/welcome.html)
|
||||
* [PyCharm](https://docs.aws.amazon.com/toolkit-for-jetbrains/latest/userguide/welcome.html)
|
||||
* [RubyMine](https://docs.aws.amazon.com/toolkit-for-jetbrains/latest/userguide/welcome.html)
|
||||
* [DataGrip](https://docs.aws.amazon.com/toolkit-for-jetbrains/latest/userguide/welcome.html)
|
||||
* [VS Code](https://docs.aws.amazon.com/toolkit-for-vscode/latest/userguide/welcome.html)
|
||||
* [Visual Studio](https://docs.aws.amazon.com/toolkit-for-visual-studio/latest/user-guide/welcome.html)
|
||||
|
||||
## Deploy the sample application
|
||||
|
||||
The Serverless Application Model Command Line Interface (SAM CLI) is an extension of the AWS CLI that adds functionality for building and testing Lambda applications. It uses Docker to run your functions in an Amazon Linux environment that matches Lambda. It can also emulate your application's build environment and API.
|
||||
|
||||
To use the SAM CLI, you need the following tools.
|
||||
|
||||
* SAM CLI - [Install the SAM CLI](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/serverless-sam-cli-install.html)
|
||||
* [Python 3 installed](https://www.python.org/downloads/)
|
||||
* Docker - [Install Docker community edition](https://hub.docker.com/search/?type=edition&offering=community)
|
||||
|
||||
To build and deploy your application for the first time, run the following in your shell:
|
||||
|
||||
```bash
|
||||
sam build --use-container
|
||||
sam deploy --guided
|
||||
```
|
||||
|
||||
The first command will build the source of your application. The second command will package and deploy your application to AWS, with a series of prompts:
|
||||
|
||||
* **Stack Name**: The name of the stack to deploy to CloudFormation. This should be unique to your account and region, and a good starting point would be something matching your project name.
|
||||
* **AWS Region**: The AWS region you want to deploy your app to.
|
||||
* **Confirm changes before deploy**: If set to yes, any change sets will be shown to you before execution for manual review. If set to no, the AWS SAM CLI will automatically deploy application changes.
|
||||
* **Allow SAM CLI IAM role creation**: Many AWS SAM templates, including this example, create AWS IAM roles required for the AWS Lambda function(s) included to access AWS services. By default, these are scoped down to minimum required permissions. To deploy an AWS CloudFormation stack which creates or modifies IAM roles, the `CAPABILITY_IAM` value for `capabilities` must be provided. If permission isn't provided through this prompt, to deploy this example you must explicitly pass `--capabilities CAPABILITY_IAM` to the `sam deploy` command.
|
||||
* **Save arguments to samconfig.toml**: If set to yes, your choices will be saved to a configuration file inside the project, so that in the future you can just re-run `sam deploy` without parameters to deploy changes to your application.
|
||||
|
||||
You can find your API Gateway Endpoint URL in the output values displayed after deployment.
|
||||
|
||||
## Use the SAM CLI to build and test locally
|
||||
|
||||
Build your application with the `sam build --use-container` command.
|
||||
|
||||
```bash
|
||||
lambda-python3.12$ sam build --use-container
|
||||
```
|
||||
|
||||
The SAM CLI installs dependencies defined in `hello_world/requirements.txt`, creates a deployment package, and saves it in the `.aws-sam/build` folder.
|
||||
|
||||
Test a single function by invoking it directly with a test event. An event is a JSON document that represents the input that the function receives from the event source. Test events are included in the `events` folder in this project.
|
||||
|
||||
Run functions locally and invoke them with the `sam local invoke` command.
|
||||
|
||||
```bash
|
||||
lambda-python3.12$ sam local invoke HelloWorldFunction --event events/event.json
|
||||
```
|
||||
|
||||
The SAM CLI can also emulate your application's API. Use the `sam local start-api` command to run the API locally on port 3000.
|
||||
|
||||
```bash
|
||||
lambda-python3.12$ sam local start-api
|
||||
lambda-python3.12$ curl http://localhost:3000/
|
||||
```
|
||||
|
||||
The SAM CLI reads the application template to determine the API's routes and the functions that they invoke. The `Events` property on each function's definition includes the route and method for each path.
|
||||
|
||||
```yaml
|
||||
Events:
|
||||
HelloWorld:
|
||||
Type: Api
|
||||
Properties:
|
||||
Path: /hello
|
||||
Method: get
|
||||
```
|
||||
|
||||
## Add a resource to your application
|
||||
The application template uses AWS Serverless Application Model (AWS SAM) to define application resources. AWS SAM is an extension of AWS CloudFormation with a simpler syntax for configuring common serverless application resources such as functions, triggers, and APIs. For resources not included in [the SAM specification](https://github.com/awslabs/serverless-application-model/blob/master/versions/2016-10-31.md), you can use standard [AWS CloudFormation](https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-template-resource-type-ref.html) resource types.
|
||||
|
||||
## Fetch, tail, and filter Lambda function logs
|
||||
|
||||
To simplify troubleshooting, SAM CLI has a command called `sam logs`. `sam logs` lets you fetch logs generated by your deployed Lambda function from the command line. In addition to printing the logs on the terminal, this command has several nifty features to help you quickly find the bug.
|
||||
|
||||
`NOTE`: This command works for all AWS Lambda functions, not just the ones you deploy using SAM.
|
||||
|
||||
```bash
|
||||
lambda-python3.12$ sam logs -n HelloWorldFunction --stack-name "lambda-python3.12" --tail
|
||||
```
|
||||
|
||||
You can find more information and examples about filtering Lambda function logs in the [SAM CLI Documentation](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/serverless-sam-cli-logging.html).
|
||||
|
||||
## Tests
|
||||
|
||||
Tests are defined in the `tests` folder in this project. Use PIP to install the test dependencies and run tests.
|
||||
|
||||
```bash
|
||||
lambda-python3.12$ pip install -r tests/requirements.txt --user
|
||||
# unit test
|
||||
lambda-python3.12$ python -m pytest tests/unit -v
|
||||
# integration test, requiring deploying the stack first.
|
||||
# Create the env variable AWS_SAM_STACK_NAME with the name of the stack we are testing
|
||||
lambda-python3.12$ AWS_SAM_STACK_NAME="lambda-python3.12" python -m pytest tests/integration -v
|
||||
```
|
||||
|
||||
## Cleanup
|
||||
|
||||
To delete the sample application that you created, use the AWS CLI. Assuming you used your project name for the stack name, you can run the following:
|
||||
|
||||
```bash
|
||||
sam delete --stack-name "lambda-python3.12"
|
||||
```
|
||||
|
||||
## Resources
|
||||
|
||||
See the [AWS SAM developer guide](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/what-is-sam.html) for an introduction to SAM specification, the SAM CLI, and serverless application concepts.
|
||||
|
||||
Next, you can use the AWS Serverless Application Repository to deploy ready-to-use apps that go beyond "hello world" samples and learn how their authors developed them: [AWS Serverless Application Repository main page](https://aws.amazon.com/serverless/serverlessrepo/)
|
||||
0
Report Generator/lambda-python3.12/__init__.py
Normal file
2
Report Generator/lambda-python3.12/build-deploy.sh
Executable file
@@ -0,0 +1,2 @@
DOCKER_HOST=unix:///Users/nico/.docker/run/docker.sock sam build --use-container
DOCKER_HOST=unix:///Users/nico/.docker/run/docker.sock sam deploy
62
Report Generator/lambda-python3.12/events/event.json
Normal file
@@ -0,0 +1,62 @@
|
||||
{
|
||||
"body": "{\"message\": \"hello world\"}",
|
||||
"resource": "/hello",
|
||||
"path": "/hello",
|
||||
"httpMethod": "GET",
|
||||
"isBase64Encoded": false,
|
||||
"queryStringParameters": {
|
||||
"foo": "bar"
|
||||
},
|
||||
"pathParameters": {
|
||||
"proxy": "/path/to/resource"
|
||||
},
|
||||
"stageVariables": {
|
||||
"baz": "qux"
|
||||
},
|
||||
"headers": {
|
||||
"Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8",
|
||||
"Accept-Encoding": "gzip, deflate, sdch",
|
||||
"Accept-Language": "en-US,en;q=0.8",
|
||||
"Cache-Control": "max-age=0",
|
||||
"CloudFront-Forwarded-Proto": "https",
|
||||
"CloudFront-Is-Desktop-Viewer": "true",
|
||||
"CloudFront-Is-Mobile-Viewer": "false",
|
||||
"CloudFront-Is-SmartTV-Viewer": "false",
|
||||
"CloudFront-Is-Tablet-Viewer": "false",
|
||||
"CloudFront-Viewer-Country": "US",
|
||||
"Host": "1234567890.execute-api.us-east-1.amazonaws.com",
|
||||
"Upgrade-Insecure-Requests": "1",
|
||||
"User-Agent": "Custom User Agent String",
|
||||
"Via": "1.1 08f323deadbeefa7af34d5feb414ce27.cloudfront.net (CloudFront)",
|
||||
"X-Amz-Cf-Id": "cDehVQoZnx43VYQb9j2-nvCh-9z396Uhbp027Y2JvkCPNLmGJHqlaA==",
|
||||
"X-Forwarded-For": "127.0.0.1, 127.0.0.2",
|
||||
"X-Forwarded-Port": "443",
|
||||
"X-Forwarded-Proto": "https"
|
||||
},
|
||||
"requestContext": {
|
||||
"accountId": "123456789012",
|
||||
"resourceId": "123456",
|
||||
"stage": "prod",
|
||||
"requestId": "c6af9ac6-7b61-11e6-9a41-93e8deadbeef",
|
||||
"requestTime": "09/Apr/2015:12:34:56 +0000",
|
||||
"requestTimeEpoch": 1428582896000,
|
||||
"identity": {
|
||||
"cognitoIdentityPoolId": null,
|
||||
"accountId": null,
|
||||
"cognitoIdentityId": null,
|
||||
"caller": null,
|
||||
"accessKey": null,
|
||||
"sourceIp": "127.0.0.1",
|
||||
"cognitoAuthenticationType": null,
|
||||
"cognitoAuthenticationProvider": null,
|
||||
"userArn": null,
|
||||
"userAgent": "Custom User Agent String",
|
||||
"user": null
|
||||
},
|
||||
"path": "/prod/hello",
|
||||
"resourcePath": "/hello",
|
||||
"httpMethod": "POST",
|
||||
"apiId": "1234567890",
|
||||
"protocol": "HTTP/1.1"
|
||||
}
|
||||
}
|
||||
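For orientation, a small sketch of how a proxy-integration handler would read the fields of the sample event above (key names are taken directly from event.json):

```python
import json

def describe_event(event):
    # Pull a few representative fields out of the API Gateway proxy event.
    body = json.loads(event["body"])             # {"message": "hello world"}
    foo = event["queryStringParameters"]["foo"]  # "bar"
    return f'{event["httpMethod"]} {event["path"]} foo={foo} message={body["message"]}'
```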
36
Report Generator/lambda-python3.12/samconfig.toml
Normal file
@@ -0,0 +1,36 @@
|
||||
# More information about the configuration file can be found here:
|
||||
# https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/serverless-sam-cli-config.html
|
||||
version = 0.1
|
||||
|
||||
[default]
|
||||
[default.global.parameters]
|
||||
stack_name = "lambda-python3.12"
|
||||
|
||||
[default.build.parameters]
|
||||
cached = true
|
||||
parallel = true
|
||||
|
||||
[default.validate.parameters]
|
||||
lint = true
|
||||
|
||||
[default.deploy.parameters]
|
||||
capabilities = "CAPABILITY_IAM"
|
||||
confirm_changeset = true
|
||||
resolve_s3 = true
|
||||
stack_name = "tbreport"
|
||||
s3_prefix = "tbreport"
|
||||
region = "us-east-1"
|
||||
image_repositories = []
|
||||
disable_rollback = true
|
||||
|
||||
[default.package.parameters]
|
||||
resolve_s3 = true
|
||||
|
||||
[default.sync.parameters]
|
||||
watch = true
|
||||
|
||||
[default.local_start_api.parameters]
|
||||
warm_containers = "EAGER"
|
||||
|
||||
[default.local_start_lambda.parameters]
|
||||
warm_containers = "EAGER"
|
||||
42
Report Generator/lambda-python3.12/tbreport/app.py
Normal file
@@ -0,0 +1,42 @@
|
||||
import json
|
||||
|
||||
# import requests
|
||||
|
||||
|
||||
def lambda_handler(event, context):
|
||||
"""Sample pure Lambda function
|
||||
|
||||
Parameters
|
||||
----------
|
||||
event: dict, required
|
||||
API Gateway Lambda Proxy Input Format
|
||||
|
||||
Event doc: https://docs.aws.amazon.com/apigateway/latest/developerguide/set-up-lambda-proxy-integrations.html#api-gateway-simple-proxy-for-lambda-input-format
|
||||
|
||||
context: object, required
|
||||
Lambda Context runtime methods and attributes
|
||||
|
||||
Context doc: https://docs.aws.amazon.com/lambda/latest/dg/python-context-object.html
|
||||
|
||||
Returns
|
||||
------
|
||||
API Gateway Lambda Proxy Output Format: dict
|
||||
|
||||
Return doc: https://docs.aws.amazon.com/apigateway/latest/developerguide/set-up-lambda-proxy-integrations.html
|
||||
"""
|
||||
|
||||
# try:
|
||||
# ip = requests.get("http://checkip.amazonaws.com/")
|
||||
# except requests.RequestException as e:
|
||||
# # Send some context about this error to Lambda Logs
|
||||
# print(e)
|
||||
|
||||
# raise e
|
||||
|
||||
return {
|
||||
"statusCode": 200,
|
||||
"body": json.dumps({
|
||||
"message": "hello world",
|
||||
# "location": ip.text.replace("\n", "")
|
||||
}),
|
||||
}
|
||||
64
Report Generator/lambda-python3.12/tbreport/config.json
Normal file
@@ -0,0 +1,64 @@
|
||||
[
|
||||
{
|
||||
"emails": [
|
||||
"nmelone@henry-pump.com"
|
||||
],
|
||||
"customers": {
|
||||
"ec691940-52e2-11ec-a919-556e8dbef35c": {
|
||||
"name": "CrownQuest",
|
||||
"deviceTypes": [
|
||||
{
|
||||
"deviceType": "rigpump",
|
||||
"dataPoints": [
|
||||
"vfd_current",
|
||||
"vfd_frequency"
|
||||
]
|
||||
},
|
||||
{
|
||||
"deviceType": "cqwatertanks",
|
||||
"dataPoints": [
|
||||
"fm_1_flow_rate"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
"81083430-6988-11ec-a919-556e8dbef35c": {
|
||||
"name": "Henry-Petroleum",
|
||||
"deviceTypes": [
|
||||
{
|
||||
"deviceType": "abbflow",
|
||||
"dataPoints": [
|
||||
"accumulated_volume"
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"filterDevicesIn": [],
|
||||
"filterDevicesOut": [],
|
||||
"name": "CrownQuest-Daily-Report"
|
||||
},
|
||||
{
|
||||
"emails": [
|
||||
"nmelone@henry-pump.com"
|
||||
],
|
||||
"customers": {
|
||||
"81083430-6988-11ec-a919-556e8dbef35c": {
|
||||
"name": "Henry-Petroleum",
|
||||
"deviceTypes": [
|
||||
{
|
||||
"deviceType": "abbflow",
|
||||
"dataPoints": [
|
||||
"accumulated_volume"
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"filterDevicesIn": [
|
||||
"0090dbd0-abb0-11ec-90c2-ad8278896f52"
|
||||
],
|
||||
"filterDevicesOut": [],
|
||||
"name": "Henry-Petroleum-Daily-Report"
|
||||
}
|
||||
]
|
||||
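Each top-level entry in config.json above defines one report: its recipients, the customers to include, the data points to keep per device type, and optional allow/deny device filters. A minimal sketch (not part of the commit) of walking that structure:

```python
import json

with open("config.json") as f:
    config = json.load(f)

for report in config:
    print(report["name"], "->", ", ".join(report["emails"]))
    for customer_id, customer in report["customers"].items():
        # Map each device type to the telemetry keys the report should keep.
        wanted = {d["deviceType"]: d["dataPoints"] for d in customer["deviceTypes"]}
        print(f"  {customer['name']} ({customer_id}): {wanted}")
```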
6
Report Generator/lambda-python3.12/tbreport/handler.py
Normal file
@@ -0,0 +1,6 @@
|
||||
import json
|
||||
def handler(event, context):
|
||||
# Log the event argument for debugging and for use in local development.
|
||||
print(json.dumps(event))
|
||||
|
||||
return {}
|
||||
114
Report Generator/lambda-python3.12/tbreport/tbreport.py
Normal file
@@ -0,0 +1,114 @@
|
||||
from tb_rest_client.rest_client_pe import *
|
||||
from tb_rest_client.rest import ApiException
|
||||
import json, xlsxwriter, boto3, os
|
||||
from datetime import datetime as dt
|
||||
from email.mime.multipart import MIMEMultipart
|
||||
from email.mime.text import MIMEText
|
||||
from email import encoders
|
||||
from email.mime.base import MIMEBase
|
||||
|
||||
def lambda_handler(event, context):
|
||||
#Creating Rest Client for ThingsBoard
|
||||
with RestClientPE(base_url="https://hp.henrypump.cloud") as rest_client:
|
||||
try:
|
||||
rest_client.login(username=os.environ["username"], password=os.environ["password"])
|
||||
#Loading Config from file
|
||||
with open("./config.json") as f:
|
||||
config = json.load(f)
|
||||
|
||||
reportData = {}
|
||||
reportToList = {}
|
||||
#Loop through each item in config, each item represents a report
|
||||
for report in config:
|
||||
reportToList[report["name"]] = report["emails"]
|
||||
#Each customer becomes its own xlsx file later
|
||||
for customer in report["customers"].keys():
|
||||
#Get all the devices for a given customer
|
||||
devices = rest_client.get_customer_devices(customer_id=customer, page=0, page_size=100)
|
||||
#Filter devices to the desired devices
|
||||
if report["filterDevicesIn"]:
|
||||
devices.data = [device for device in devices.data if device.id.id in report["filterDevicesIn"]]
|
||||
if report["filterDevicesOut"]:
|
||||
devices.data = [device for device in devices.data if device.id.id not in report["filterDevicesOut"]]
|
||||
#Create the report in reportData if needed
|
||||
if not reportData.get(report["name"], None):
|
||||
reportData[report["name"]] = {}
|
||||
#Go through all the devices and add them and their desired data to reportData
|
||||
for device in devices.data:
|
||||
name = device.name
|
||||
keys = rest_client.get_timeseries_keys_v1(device.id)
|
||||
#Filter keys to desired keys
|
||||
for deviceType in report["customers"][customer]["deviceTypes"]:
|
||||
if device.type == deviceType["deviceType"]:
|
||||
keys = list(filter(lambda x: x in deviceType["dataPoints"], keys))
|
||||
#Create customer if needed
|
||||
if not reportData[report["name"]].get(report["customers"][customer]["name"], None):
|
||||
reportData[report["name"]][report["customers"][customer]["name"]] = {}
|
||||
#Check to make sure the deviceType is desired in the report for the given device
|
||||
if device.type in list(map(lambda x: x["deviceType"], report["customers"][customer]["deviceTypes"])):
|
||||
#Create deviceType if needed
|
||||
if not reportData[report["name"]][report["customers"][customer]["name"]].get(device.type, None):
|
||||
reportData[report["name"]][report["customers"][customer]["name"]][device.type] = {}
|
||||
reportData[report["name"]][report["customers"][customer]["name"]][device.type][device.name] = rest_client.get_latest_timeseries(entity_id=device.id , keys=",".join(keys))
|
||||
except ApiException as e:
|
||||
print(e)
|
||||
|
||||
|
||||
# Create an AWS SES client
|
||||
ses_client = boto3.client('ses', region_name='us-east-1')
|
||||
|
||||
|
||||
# Create a workbook for each report
|
||||
for report_name, report_data in reportData.items():
|
||||
#Workbooks collected here are attached to one email per report below
|
||||
spreadsheets = []
|
||||
# Create a workbook for each company
|
||||
for company_name, company_data in report_data.items():
|
||||
workbook = xlsxwriter.Workbook(f"/tmp/{report_name}-{company_name}-{dt.today().strftime('%Y-%m-%d')}.xlsx")
|
||||
bold = workbook.add_format({'bold': True})
|
||||
# Create a sheet for each device type
|
||||
for device_type, device_data in company_data.items():
|
||||
worksheet = workbook.add_worksheet(device_type)
|
||||
|
||||
# Write the device names down the first column
device_names = list(device_data.keys())
worksheet.write_column(1, 0, device_names, bold)
|
||||
|
||||
# Write the data to the sheet
|
||||
for i, (device_name, device_telemetry) in enumerate(device_data.items()):
# Set the header row with this device's telemetry names
telemetry_names = list(device_telemetry.keys())
worksheet.write_row(0, 1, telemetry_names, bold)
for j, (data_name, data) in enumerate(device_telemetry.items()):
|
||||
values = [d["value"] for d in data]
|
||||
worksheet.write_row(i + 1, j+ 1, values)
|
||||
worksheet.autofit()
|
||||
workbook.close()
|
||||
spreadsheets.append(workbook)
|
||||
# Create an email message
|
||||
msg = MIMEMultipart()
|
||||
msg['Subject'] = report_name
|
||||
msg['From'] = 'alerts@henry-pump.com'
|
||||
msg['To'] = ", ".join(reportToList[report_name])
|
||||
|
||||
# Add a text body to the message (optional)
|
||||
body_text = 'Please find the attached spreadsheets.'
|
||||
msg.attach(MIMEText(body_text, 'plain'))
|
||||
|
||||
# Attach each workbook in the spreadsheets array
|
||||
for spreadsheet in spreadsheets:
|
||||
# Attach the file to the email message
|
||||
attachment = MIMEBase('application', 'octet-stream')
|
||||
attachment.set_payload(open(spreadsheet.filename, "rb").read())
|
||||
encoders.encode_base64(attachment)
|
||||
attachment.add_header('Content-Disposition', 'attachment', filename=spreadsheet.filename[5:])
|
||||
|
||||
msg.attach(attachment)
|
||||
|
||||
# Send the email using AWS SES
|
||||
response = ses_client.send_raw_email(
|
||||
|
||||
RawMessage={'Data': msg.as_string()}
|
||||
)
|
||||
|
||||
|
||||
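Note that `lambda_handler` above sends the emails but returns nothing, so scheduled invocations report `null`. A hypothetical tail (not in the commit) that surfaces a small summary instead:

```python
def lambda_handler(event, context):
    report_names = send_all_reports()  # stand-in for the collection/email logic above
    return {"statusCode": 200, "reportsSent": report_names}

def send_all_reports():
    # Stand-in only: the real logic lives in tbreport.lambda_handler.
    return ["CrownQuest-Daily-Report", "Henry-Petroleum-Daily-Report"]
```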
@@ -0,0 +1,2 @@
xlsxwriter
tb-rest-client
68
Report Generator/lambda-python3.12/template.yaml
Normal file
@@ -0,0 +1,68 @@
|
||||
AWSTemplateFormatVersion: '2010-09-09'
|
||||
Transform: AWS::Serverless-2016-10-31
|
||||
Description: |
|
||||
lambda-python3.12
|
||||
Sample SAM Template for lambda-python3.12
|
||||
|
||||
# More info about Globals: https://github.com/awslabs/serverless-application-model/blob/master/docs/globals.rst
|
||||
Globals:
|
||||
Function:
|
||||
Timeout: 3
|
||||
|
||||
Resources:
|
||||
TBReport:
|
||||
Type: AWS::Serverless::Function # More info about Function Resource: https://github.com/awslabs/serverless-application-model/blob/master/versions/2016-10-31.md#awsserverlessfunction
|
||||
Properties:
|
||||
MemorySize: 128
|
||||
Timeout: 300
|
||||
Environment:
|
||||
Variables:
|
||||
username: henry.pump.automation@gmail.com
|
||||
password: Henry Pump @ 2022
|
||||
Architectures:
|
||||
- arm64
|
||||
CodeUri: tbreport
|
||||
Runtime: python3.12
|
||||
Handler: tbreport.lambda_handler
|
||||
Policies: AmazonSESFullAccess
|
||||
Layers:
|
||||
- !Ref TBReportLayer
|
||||
TBReportLayer:
|
||||
Type: AWS::Serverless::LayerVersion
|
||||
Properties:
|
||||
ContentUri: tbreportlayer/
|
||||
CompatibleRuntimes:
|
||||
- python3.9
|
||||
- python3.10
|
||||
- python3.11
|
||||
- python3.12
|
||||
Metadata:
|
||||
BuildMethod: python3.9
|
||||
Schedule:
|
||||
Type: AWS::Scheduler::Schedule
|
||||
Properties:
|
||||
ScheduleExpression: cron(0 5 * * ? *)
|
||||
FlexibleTimeWindow:
|
||||
Mode: 'OFF'
|
||||
ScheduleExpressionTimezone: America/Chicago
|
||||
Target:
|
||||
Arn: !GetAtt TBReport.Arn
|
||||
RoleArn: !GetAtt ScheduleToTBReportRole.Arn
|
||||
ScheduleToTBReportRole:
|
||||
Type: AWS::IAM::Role
|
||||
Properties:
|
||||
AssumeRolePolicyDocument:
|
||||
Version: '2012-10-17'
|
||||
Statement:
|
||||
Effect: Allow
|
||||
Principal:
|
||||
Service: !Sub scheduler.${AWS::URLSuffix}
|
||||
Action: sts:AssumeRole
|
||||
Policies:
|
||||
- PolicyName: StartExecutionPolicy
|
||||
PolicyDocument:
|
||||
Version: '2012-10-17'
|
||||
Statement:
|
||||
- Effect: Allow
|
||||
Action: lambda:InvokeFunction
|
||||
Resource: !GetAtt TBReport.Arn
|
||||
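The `Schedule` resource above fires `TBReport` daily at 05:00 America/Chicago. For an ad-hoc test outside that schedule, the function can be invoked directly; a sketch, assuming default credentials, with the function name guessed from the stack (look up the real generated name first, e.g. with `aws lambda list-functions`):

```python
import json
import boto3

client = boto3.client("lambda", region_name="us-east-1")
# "tbreport-TBReport" is a guess at the generated name; substitute the real one.
resp = client.invoke(FunctionName="tbreport-TBReport", Payload=json.dumps({}))
print(resp["StatusCode"], resp["Payload"].read())
```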
@@ -0,0 +1,45 @@
|
||||
import os
|
||||
|
||||
import boto3
|
||||
import pytest
|
||||
import requests
|
||||
|
||||
"""
|
||||
Make sure env variable AWS_SAM_STACK_NAME exists with the name of the stack we are going to test.
|
||||
"""
|
||||
|
||||
|
||||
class TestApiGateway:
|
||||
|
||||
@pytest.fixture()
|
||||
def api_gateway_url(self):
|
||||
""" Get the API Gateway URL from Cloudformation Stack outputs """
|
||||
stack_name = os.environ.get("AWS_SAM_STACK_NAME")
|
||||
|
||||
if stack_name is None:
|
||||
raise ValueError('Please set the AWS_SAM_STACK_NAME environment variable to the name of your stack')
|
||||
|
||||
client = boto3.client("cloudformation")
|
||||
|
||||
try:
|
||||
response = client.describe_stacks(StackName=stack_name)
|
||||
except Exception as e:
|
||||
raise Exception(
|
||||
f"Cannot find stack {stack_name} \n" f'Please make sure a stack with the name "{stack_name}" exists'
|
||||
) from e
|
||||
|
||||
stacks = response["Stacks"]
|
||||
stack_outputs = stacks[0]["Outputs"]
|
||||
api_outputs = [output for output in stack_outputs if output["OutputKey"] == "HelloWorldApi"]
|
||||
|
||||
if not api_outputs:
|
||||
raise KeyError(f"HelloWorldAPI not found in stack {stack_name}")
|
||||
|
||||
return api_outputs[0]["OutputValue"] # Extract url from stack outputs
|
||||
|
||||
def test_api_gateway(self, api_gateway_url):
|
||||
""" Call the API Gateway endpoint and check the response """
|
||||
response = requests.get(api_gateway_url)
|
||||
|
||||
assert response.status_code == 200
|
||||
assert response.json() == {"message": "hello world"}
|
||||
@@ -0,0 +1,3 @@
pytest
boto3
requests
@@ -0,0 +1,72 @@
|
||||
import json
|
||||
|
||||
import pytest
|
||||
|
||||
from hello_world import app
|
||||
|
||||
|
||||
@pytest.fixture()
|
||||
def apigw_event():
|
||||
""" Generates API GW Event"""
|
||||
|
||||
return {
|
||||
"body": '{ "test": "body"}',
|
||||
"resource": "/{proxy+}",
|
||||
"requestContext": {
|
||||
"resourceId": "123456",
|
||||
"apiId": "1234567890",
|
||||
"resourcePath": "/{proxy+}",
|
||||
"httpMethod": "POST",
|
||||
"requestId": "c6af9ac6-7b61-11e6-9a41-93e8deadbeef",
|
||||
"accountId": "123456789012",
|
||||
"identity": {
|
||||
"apiKey": "",
|
||||
"userArn": "",
|
||||
"cognitoAuthenticationType": "",
|
||||
"caller": "",
|
||||
"userAgent": "Custom User Agent String",
|
||||
"user": "",
|
||||
"cognitoIdentityPoolId": "",
|
||||
"cognitoIdentityId": "",
|
||||
"cognitoAuthenticationProvider": "",
|
||||
"sourceIp": "127.0.0.1",
|
||||
"accountId": "",
|
||||
},
|
||||
"stage": "prod",
|
||||
},
|
||||
"queryStringParameters": {"foo": "bar"},
|
||||
"headers": {
|
||||
"Via": "1.1 08f323deadbeefa7af34d5feb414ce27.cloudfront.net (CloudFront)",
|
||||
"Accept-Language": "en-US,en;q=0.8",
|
||||
"CloudFront-Is-Desktop-Viewer": "true",
|
||||
"CloudFront-Is-SmartTV-Viewer": "false",
|
||||
"CloudFront-Is-Mobile-Viewer": "false",
|
||||
"X-Forwarded-For": "127.0.0.1, 127.0.0.2",
|
||||
"CloudFront-Viewer-Country": "US",
|
||||
"Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8",
|
||||
"Upgrade-Insecure-Requests": "1",
|
||||
"X-Forwarded-Port": "443",
|
||||
"Host": "1234567890.execute-api.us-east-1.amazonaws.com",
|
||||
"X-Forwarded-Proto": "https",
|
||||
"X-Amz-Cf-Id": "aaaaaaaaaae3VYQb9jd-nvCd-de396Uhbp027Y2JvkCPNLmGJHqlaA==",
|
||||
"CloudFront-Is-Tablet-Viewer": "false",
|
||||
"Cache-Control": "max-age=0",
|
||||
"User-Agent": "Custom User Agent String",
|
||||
"CloudFront-Forwarded-Proto": "https",
|
||||
"Accept-Encoding": "gzip, deflate, sdch",
|
||||
},
|
||||
"pathParameters": {"proxy": "/examplepath"},
|
||||
"httpMethod": "POST",
|
||||
"stageVariables": {"baz": "qux"},
|
||||
"path": "/examplepath",
|
||||
}
|
||||
|
||||
|
||||
def test_lambda_handler(apigw_event):
|
||||
|
||||
ret = app.lambda_handler(apigw_event, "")
|
||||
data = json.loads(ret["body"])
|
||||
|
||||
assert ret["statusCode"] == 200
|
||||
assert "message" in ret["body"]
|
||||
assert data["message"] == "hello world"
|
||||
BIN
Widgets/.DS_Store
vendored
Binary file not shown.
71
getAllTelemetry.ipynb
Normal file
@@ -0,0 +1,71 @@
|
||||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from tb_rest_client.rest_client_pe import *\n",
|
||||
"from tb_rest_client.rest import ApiException\n",
|
||||
"from tb_rest_client.api_client import *\n",
|
||||
"import json\n",
|
||||
"from datetime import datetime as dt"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"def convertDateTimeToMS(datestring):\n",
|
||||
" date = dt.strptime(datestring,\"%d %b %Y, %H:%M:%S\")\n",
|
||||
" return int(date.timestamp() * 1000)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"url = \"https://hp.henrypump.cloud\"\n",
|
||||
"username = \"example@example.com\"\n",
|
||||
"password = \"password123\"\n",
|
||||
"with RestClientPE(base_url=url) as rest_client:\n",
|
||||
" try:\n",
|
||||
" rest_client.login(username=username, password=password)\n",
|
||||
" cid = \"0d4427e0-18ae-11ed-8e6d-5b69d1f90f37\" #This is the Customer ID for Henry Resources and doesn't need changed\n",
|
||||
" devices = rest_client.get_customer_devices(customer_id=cid, page=0, page_size=100)\n",
|
||||
" #print(json.dumps(device.to_dict(),indent=4))\n",
|
||||
" #print(devices)\n",
|
||||
" for device in devices.data:\n",
|
||||
" eType = device.id.entity_type\n",
|
||||
" eid = device.id.id\n",
|
||||
" name = device.name\n",
|
||||
" start = convertDateTimeToMS(\"01 Jan 2024, 00:00:00\") #The date time for when the data should start\n",
|
||||
" end = int(dt.now().timestamp() * 1000) #automatically sets to now\n",
|
||||
" #print(eType, eid, name)\n",
|
||||
" keys = rest_client.get_timeseries_keys_v1(eType,eid)\n",
|
||||
" telemetry = rest_client.get_timeseries(entity_type=eType, entity_id=eid , keys=\",\".join(keys), start_ts=start, end_ts=end, limit=10000)\n",
|
||||
" print([telemetry])\n",
|
||||
" except ApiException as e:\n",
|
||||
" print(e)"
|
||||
]
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "thingsboard",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"name": "python",
|
||||
"version": "3.10.5"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 2
|
||||
}
|
||||
32
getAllTelemetry.py
Normal file
@@ -0,0 +1,32 @@
from tb_rest_client.rest_client_pe import *
from tb_rest_client.rest import ApiException
from tb_rest_client.api_client import *
import json
from datetime import datetime as dt

def convertDateTimeToMS(datestring):
    date = dt.strptime(datestring, "%d %b %Y, %H:%M:%S")
    return int(date.timestamp() * 1000)

url = "https://hp.henrypump.cloud"
username = "example@example.com"
password = "password123"
with RestClientPE(base_url=url) as rest_client:
    try:
        rest_client.login(username=username, password=password)
        cid = "0d4427e0-18ae-11ed-8e6d-5b69d1f90f37"  #This is the Customer ID for Henry Resources and doesn't need to be changed
        devices = rest_client.get_customer_devices(customer_id=cid, page=0, page_size=100)
        #print(json.dumps(device.to_dict(), indent=4))
        #print(devices)
        for device in devices.data:
            eType = device.id.entity_type
            eid = device.id.id
            name = device.name
            start = convertDateTimeToMS("01 Jan 2024, 00:00:00")  #The date/time the data should start from
            end = int(dt.now().timestamp() * 1000)  #Automatically set to now
            #print(eType, eid, name)
            keys = rest_client.get_timeseries_keys_v1(eType, eid)
            telemetry = rest_client.get_timeseries(entity_type=eType, entity_id=eid, keys=",".join(keys), start_ts=start, end_ts=end, limit=10000)
            print([telemetry])
    except ApiException as e:
        print(e)
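One caveat with the script above: `get_customer_devices` is called with a single page of 100, so customers with more devices are silently truncated. A sketch of paging through all of them, continuing from the script's `rest_client` and `cid` (assumes the client's page result exposes `has_next`, which recent tb-rest-client versions do):

```python
page = 0
all_devices = []
while True:
    batch = rest_client.get_customer_devices(customer_id=cid, page=page, page_size=100)
    all_devices.extend(batch.data)
    if not getattr(batch, "has_next", False):  # stop once the last page is reached
        break
    page += 1
```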
@@ -17,16 +17,7 @@
   "metadata": {},
   "outputs": [],
   "source": [
    "tb = ThingsBoardAPI(\"nmelone@henry-pump.com\", \"gzU6$26v42mU%3jDzTJf\", \"hp.henrypump.cloud\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "#tb"
    "tb = ThingsBoardAPI(\"nmelone08@gmail.com\", \"zaq1ZAQ!\", \"hp.henrypump.cloud\")"
   ]
  },
  {
@@ -36,7 +27,7 @@
   "outputs": [],
   "source": [
    "customers = tb.getCustomers()\n",
    "devices = tb.getDevices(customers=customers, target_customer=\"Amerus Safety Solutions\", pageSize=100)\n",
    "devices = tb.getDevices(customers=customers, target_customer=\"Henry Resources\", pageSize=100)\n",
    "eType, did, keys, err = tb.getDeviceKeys(devices=devices, target_device=\"Camera Trailer 106\")\n",
    "#print(tb.getDeviceKeys(devices=devices, target_device=\"Camera Trailer 106\"))\n",
    "if not err:\n",
BIN
meshifyDrivers/.DS_Store
vendored
BIN
meshifyDrivers/.DS_Store
vendored
Binary file not shown.
301
meshifyDrivers/advvfdipp/Channel.py
Normal file
301
meshifyDrivers/advvfdipp/Channel.py
Normal file
@@ -0,0 +1,301 @@
"""Define Meshify channel class."""
import time
import urllib
from pycomm.ab_comm.clx import Driver as ClxDriver
from pycomm.cip.cip_base import CommError, DataError
from file_logger import filelogger as log


TAG_DATAERROR_SLEEPTIME = 5


def binarray(intval):
    """Split an integer into its bits."""
    bin_string = '{0:08b}'.format(intval)
    bin_arr = [i for i in bin_string]
    bin_arr.reverse()
    return bin_arr


def read_tag(addr, tag, plc_type="CLX"):
    """Read a tag from the PLC."""
    direct = plc_type == "Micro800"
    clx = ClxDriver()
    try:
        if clx.open(addr, direct_connection=direct):
            try:
                val = clx.read_tag(tag)
                clx.close()
                return val
            except DataError as err:
                clx.close()
                time.sleep(TAG_DATAERROR_SLEEPTIME)
                log.error("Data Error during readTag({}, {}): {}".format(addr, tag, err))
    except CommError:
        # err = c.get_status()
        clx.close()
        log.error("Could not connect during readTag({}, {})".format(addr, tag))
    except AttributeError as err:
        clx.close()
        log.error("AttributeError during readTag({}, {}): \n{}".format(addr, tag, err))
    clx.close()
    return False


def read_array(addr, tag, start, end, plc_type="CLX"):
    """Read an array from the PLC."""
    direct = plc_type == "Micro800"
    clx = ClxDriver()
    if clx.open(addr, direct_connection=direct):
        arr_vals = []
        try:
            for i in range(start, end):
                tag_w_index = tag + "[{}]".format(i)
                val = clx.read_tag(tag_w_index)
                arr_vals.append(round(val[0], 4))
            if arr_vals:
                clx.close()
                return arr_vals
            else:
                log.error("No length for {}".format(addr))
                clx.close()
                return False
        except Exception:
            log.error("Error during readArray({}, {}, {}, {})".format(addr, tag, start, end))
            err = clx.get_status()
            clx.close()
            log.error(err)
    clx.close()


def write_tag(addr, tag, val, plc_type="CLX"):
    """Write a tag value to the PLC."""
    direct = plc_type == "Micro800"
    clx = ClxDriver()
    try:
        if clx.open(addr, direct_connection=direct):
            try:
                initial_val = clx.read_tag(tag)
                write_status = clx.write_tag(tag, val, initial_val[1])
                clx.close()
                return write_status
            except DataError as err:
                clx_err = clx.get_status()
                clx.close()
                log.error("--\nDataError during writeTag({}, {}, {}, plc_type={}) -- {}\n{}\n".format(addr, tag, val, plc_type, err, clx_err))

    except CommError as err:
        clx_err = clx.get_status()
        log.error("--\nCommError during write_tag({}, {}, {}, plc_type={})\n{}\n--".format(addr, tag, val, plc_type, err))
    clx.close()
    return False


class Channel(object):
    """Holds the configuration for a Meshify channel."""

    def __init__(self, mesh_name, data_type, chg_threshold, guarantee_sec, map_=False, write_enabled=False):
        """Initialize the channel."""
        self.mesh_name = mesh_name
        self.data_type = data_type
        self.last_value = None
        self.value = None
        self.last_send_time = 0
        self.chg_threshold = chg_threshold
        self.guarantee_sec = guarantee_sec
        self.map_ = map_
        self.write_enabled = write_enabled

    def __str__(self):
        """Create a string for the channel."""
        return "{}\nvalue: {}, last_send_time: {}".format(self.mesh_name, self.value, self.last_send_time)

    def check(self, new_value, force_send=False):
        """Check to see if the new_value needs to be stored."""
        send_needed = False
        send_reason = ""
        if self.data_type == 'BOOL' or self.data_type == 'STRING':
            if self.last_send_time == 0:
                send_needed = True
                send_reason = "no send time"
            elif self.value is None:
                send_needed = True
                send_reason = "no value"
            elif self.value != new_value:
                if self.map_:
                    if not self.value == self.map_[new_value]:
                        send_needed = True
                        send_reason = "value change"
                elif force_send:
                    send_needed = True
                    send_reason = "forced"
                else:
                    send_needed = True
                    send_reason = "value change"
            elif (time.time() - self.last_send_time) > self.guarantee_sec:
                send_needed = True
                send_reason = "guarantee sec"
            elif force_send:
                send_needed = True
                send_reason = "forced"
        else:
            if self.last_send_time == 0:
                send_needed = True
                send_reason = "no send time"
            elif self.value is None:
                send_needed = True
                send_reason = "no value"
            elif abs(self.value - new_value) > self.chg_threshold:
                send_needed = True
                send_reason = "change threshold"
            elif (time.time() - self.last_send_time) > self.guarantee_sec:
                send_needed = True
                send_reason = "guarantee sec"
            elif force_send:
                send_needed = True
                send_reason = "forced"
        if send_needed:
            self.last_value = self.value
            if self.map_:
                try:
                    self.value = self.map_[new_value]
                except KeyError:
                    log.error("Cannot find a map value for {} in {} for {}".format(new_value, self.map_, self.mesh_name))
                    self.value = new_value
            else:
                self.value = new_value
            self.last_send_time = time.time()
            log.info("Sending {} for {} - {}".format(self.value, self.mesh_name, send_reason))
        return send_needed

    def read(self):
        """Read the value."""
        pass


def identity(sent):
    """Return exactly what was sent to it."""
    return sent


class ModbusChannel(Channel):
    """Modbus channel object."""

    def __init__(self, mesh_name, register_number, data_type, chg_threshold, guarantee_sec, channel_size=1, map_=False, write_enabled=False, transform_fn=identity):
        """Initialize the channel."""
        super(ModbusChannel, self).__init__(mesh_name, data_type, chg_threshold, guarantee_sec, map_, write_enabled)
        self.mesh_name = mesh_name
        self.register_number = register_number
        self.channel_size = channel_size
        self.data_type = data_type
        self.last_value = None
        self.value = None
        self.last_send_time = 0
        self.chg_threshold = chg_threshold
        self.guarantee_sec = guarantee_sec
        self.map_ = map_
        self.write_enabled = write_enabled
        self.transform_fn = transform_fn

    def read(self, mbsvalue):
        """Return the transformed read value."""
        return self.transform_fn(mbsvalue)


class PLCChannel(Channel):
    """PLC Channel Object."""

    def __init__(self, ip, mesh_name, plc_tag, data_type, chg_threshold, guarantee_sec, map_=False, write_enabled=False, plc_type='CLX'):
        """Initialize the channel."""
        super(PLCChannel, self).__init__(mesh_name, data_type, chg_threshold, guarantee_sec, map_, write_enabled)
        self.plc_ip = ip
        self.mesh_name = mesh_name
        self.plc_tag = plc_tag
        self.data_type = data_type
        self.last_value = None
        self.value = None
        self.last_send_time = 0
        self.chg_threshold = chg_threshold
        self.guarantee_sec = guarantee_sec
        self.map_ = map_
        self.write_enabled = write_enabled
        self.plc_type = plc_type

    def read(self):
        """Read the value."""
        plc_value = None
        if self.plc_tag and self.plc_ip:
            read_value = read_tag(self.plc_ip, self.plc_tag, plc_type=self.plc_type)
            if read_value:
                plc_value = read_value[0]

        return plc_value


class BoolArrayChannels(Channel):
    """Hold the configuration for a set of boolean array channels."""

    def __init__(self, ip, mesh_name, plc_tag, data_type, chg_threshold, guarantee_sec, map_=False, write_enabled=False):
        """Initialize the channel."""
        super(BoolArrayChannels, self).__init__(mesh_name, data_type, chg_threshold, guarantee_sec, map_, write_enabled)
        self.plc_ip = ip
        self.mesh_name = mesh_name
        self.plc_tag = plc_tag
        self.data_type = data_type
        self.last_value = None
        self.value = None
        self.last_send_time = 0
        self.chg_threshold = chg_threshold
        self.guarantee_sec = guarantee_sec
        self.map_ = map_
        self.write_enabled = write_enabled

    def compare_values(self, new_val_dict):
        """Compare new values to old values to see if the values need storing."""
        send = False
        for idx in new_val_dict:
            try:
                if new_val_dict[idx] != self.last_value[idx]:
                    send = True
            except KeyError:
                log.error("Key Error in self.compare_values for index {}".format(idx))
                send = True
        return send

    def read(self, force_send=False):
        """Read the value and check to see if it needs to be stored."""
        send_needed = False
        send_reason = ""
        if self.plc_tag:
            val = read_tag(self.plc_ip, self.plc_tag)
            if val:
                bool_arr = binarray(val[0])
                new_val = {}
                for idx in self.map_:
                    try:
                        new_val[self.map_[idx]] = bool_arr[idx]
                    except KeyError:
                        log.error("Not able to get value for index {}".format(idx))

                if self.last_send_time == 0:
                    send_needed = True
                    send_reason = "no send time"
                elif self.value is None:
                    send_needed = True
                    send_reason = "no value"
                elif self.compare_values(new_val):
                    send_needed = True
                    send_reason = "value change"
                elif (time.time() - self.last_send_time) > self.guarantee_sec:
                    send_needed = True
                    send_reason = "guarantee sec"
                elif force_send:
                    send_needed = True
                    send_reason = "forced"

                if send_needed:
                    self.value = new_val
                    self.last_value = self.value
                    self.last_send_time = time.time()
                    log.info("Sending {} for {} - {}".format(self.value, self.mesh_name, send_reason))
        return send_needed
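For reference, a minimal sketch of how a Channel decides whether a sample is worth publishing; the threshold and guarantee values here are made up for illustration:

# change threshold 5.0, guaranteed send at least every 3600 s
chan = Channel("intakepressure", "REAL", 5.0, 3600)
chan.check(100.0)   # True  - first sample ("no send time")
chan.check(102.0)   # False - within the 5.0 change threshold, value stays 100.0
chan.check(110.0)   # True  - exceeds the change threshold ("change threshold")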
49
meshifyDrivers/advvfdipp/Tags.py
Normal file
49
meshifyDrivers/advvfdipp/Tags.py
Normal file
@@ -0,0 +1,49 @@
from Channel import PLCChannel, ModbusChannel
from advvfdipp import PLC_IP_ADDRESS

tags = [
    PLCChannel(PLC_IP_ADDRESS, "flowrate", "val_Flowmeter", "REAL", 300, 3600, plc_type="CLX"),
    PLCChannel(PLC_IP_ADDRESS, "fluidlevel", "val_FluidLevel", "REAL", 2, 3600, plc_type="CLX"),
    PLCChannel(PLC_IP_ADDRESS, "intakepressure", "val_IntakePressure", "REAL", 10, 3600, plc_type="CLX"),
    PLCChannel(PLC_IP_ADDRESS, "intaketemperature", "val_IntakeTemperature", "REAL", 5, 3600, plc_type="CLX"),
    PLCChannel(PLC_IP_ADDRESS, "tubingpressure", "val_TubingPressure", "REAL", 10, 3600, plc_type="CLX"),
    PLCChannel(PLC_IP_ADDRESS, "pidcontrolmode", "sts_PID_Control", "BOOL", 1, 3600, plc_type="CLX"),
    PLCChannel(PLC_IP_ADDRESS, "wellstatus", "Device_Status_INT", "INT", 1, 3600, plc_type="CLX"),
    PLCChannel(PLC_IP_ADDRESS, "vfdfrequency", "VFD_SpeedFdbk", "REAL", 5, 3600, plc_type="CLX"),
    PLCChannel(PLC_IP_ADDRESS, "flowtotal", "Flow_Total[0]", "REAL", 100, 3600, plc_type="CLX"),
    PLCChannel(PLC_IP_ADDRESS, "energytotal", "Energy_Total[0]", "REAL", 10, 3600, plc_type="CLX"),
    PLCChannel(PLC_IP_ADDRESS, "vfdcurrent", "VFD_OutCurrent", "REAL", 5, 3600, plc_type="CLX"),
    PLCChannel(PLC_IP_ADDRESS, "downholesensorstatus", "Downhole_Sensor_Status_INT", "INT", 1, 3600, plc_type="CLX"),
    PLCChannel(PLC_IP_ADDRESS, "fluidspecificgravity", "cfg_FluidSpecificGravity", "REAL", 1, 3600, plc_type="CLX"),
    PLCChannel(PLC_IP_ADDRESS, "flowtotalyesterday", "Flow_Total[1]", "REAL", 100, 86400, plc_type="CLX"),
    PLCChannel(PLC_IP_ADDRESS, "energytotalyesterday", "Energy_Total[1]", "REAL", 10, 86400, plc_type="CLX"),
    PLCChannel(PLC_IP_ADDRESS, "alarmflowrate", "alarm_Flowmeter", "BOOL", 1, 3600, plc_type="CLX"),
    PLCChannel(PLC_IP_ADDRESS, "alarmintakepressure", "alarm_IntakePressure", "BOOL", 1, 3600, plc_type="CLX"),
    PLCChannel(PLC_IP_ADDRESS, "alarmintaketemperature", "alarm_IntakeTemperature", "BOOL", 1, 3600, plc_type="CLX"),
    PLCChannel(PLC_IP_ADDRESS, "alarmtubingpressure", "alarm_TubingPressure", "BOOL", 1, 3600, plc_type="CLX"),
    PLCChannel(PLC_IP_ADDRESS, "alarmvfd", "alarm_VFD", "BOOL", 1, 3600, plc_type="CLX"),
    PLCChannel(PLC_IP_ADDRESS, "alarmlockout", "alarm_Lockout", "BOOL", 1, 3600, plc_type="CLX"),
    PLCChannel(PLC_IP_ADDRESS, "runpermissive", "Run_Permissive_INT", "INT", 1, 3600, plc_type="CLX"),
    PLCChannel(PLC_IP_ADDRESS, "startpermissive", "Start_Permissive_INT", "INT", 1, 3600, plc_type="CLX"),
    PLCChannel(PLC_IP_ADDRESS, "startcommand", "cmd_Start", "BOOL", 1, 3600, plc_type="CLX"),
    PLCChannel(PLC_IP_ADDRESS, "stopcommand", "cmd_Stop", "BOOL", 1, 3600, plc_type="CLX"),
    PLCChannel(PLC_IP_ADDRESS, "flowsetpoint", "cfg_PID_FlowSP", "REAL", 1, 86400, plc_type="CLX"),
    PLCChannel(PLC_IP_ADDRESS, "fluidlevelsetpoint", "cfg_PID_FluidLevelSP", "REAL", 1, 86400, plc_type="CLX"),
    PLCChannel(PLC_IP_ADDRESS, "manualfrequencysetpoint", "cfg_PID_ManualSP", "REAL", 1, 86400, plc_type="CLX"),
    PLCChannel(PLC_IP_ADDRESS, "tubingpressuresetpoint", "cfg_PID_TubingPressureSP", "REAL", 1, 86400, plc_type="CLX"),
    PLCChannel(PLC_IP_ADDRESS, "alarmfluidlevel", "alarm_FluidLevel", "BOOL", 1, 3600, plc_type="CLX"),
    PLCChannel(PLC_IP_ADDRESS, "pressureshutdownlimit", "AIn_IntakePressure.Val_LoLim", "REAL", 1, 86400, plc_type="CLX"),
    PLCChannel(PLC_IP_ADDRESS, "pressurestartuplimit", "AIn_IntakePressure.Val_HiLim", "REAL", 1, 86400, plc_type="CLX"),
    PLCChannel(PLC_IP_ADDRESS, "temperatureshutdownlimit", "AIn_IntakeTemperature.Val_HiLim", "REAL", 1, 86400, plc_type="CLX"),
    PLCChannel(PLC_IP_ADDRESS, "temperaturestartuplimit", "AIn_IntakeTemperature.Val_LoLim", "REAL", 1, 86400, plc_type="CLX"),
    PLCChannel(PLC_IP_ADDRESS, "sensorheight", "cfg_DHSensorDistToIntake", "REAL", 1, 86400, plc_type="CLX"),
    PLCChannel(PLC_IP_ADDRESS, "last_vfd_fault_code", "PowerFlex755.Val_LastFaultCode", "INT", 1, 3600, plc_type="CLX"),
    PLCChannel(PLC_IP_ADDRESS, "vfd_fault", "sts_CurrentVFDFaultCode", "INT", 1, 3600, plc_type="CLX"),
    PLCChannel(PLC_IP_ADDRESS, "controllerfault_io", "ControllerFault_IO", "BOOL", 1, 3600, plc_type="CLX"),
    PLCChannel(PLC_IP_ADDRESS, "controllerfault_program", "ControllerFault_Program", "BOOL", 1, 3600, plc_type="CLX"),
    PLCChannel(PLC_IP_ADDRESS, "minvfdfrequency", "PowerFlex755.Cfg_MinSpdRef", "REAL", 1, 86400, plc_type="CLX"),
    PLCChannel(PLC_IP_ADDRESS, "maxvfdfrequency", "PowerFlex755.Cfg_MaxSpdRef", "REAL", 1, 86400, plc_type="CLX"),
    PLCChannel(PLC_IP_ADDRESS, "hartnettotal", "in_HART_Flowmeter_Net", "REAL", 100, 3600, plc_type="CLX"),
    PLCChannel(PLC_IP_ADDRESS, "hartfwdtotal", "in_HART_Flowmeter_Fwd", "REAL", 100, 3600, plc_type="CLX"),
    PLCChannel(PLC_IP_ADDRESS, "hartrevtotal", "in_HART_Flowmeter_Rev", "REAL", 100, 3600, plc_type="CLX")
]
264
meshifyDrivers/advvfdipp/advvfdipp.py
Normal file
264
meshifyDrivers/advvfdipp/advvfdipp.py
Normal file
@@ -0,0 +1,264 @@
"""Driver for advvfdipp"""

import threading
import json
import os
import time
from random import randint
from datetime import datetime as dt
# PERSISTENCE FILE
import persistence
PERSIST = persistence.load("extra_data.json")
if not PERSIST:
    PERSIST = {'ip_address': '192.168.1.10', 'download_pycomm': True, 'flowmeter_units': 'GPM'}
    persistence.store(PERSIST, "extra_data.json")
os.system('echo "" > /root/python_firmware/drivers/modbusMap.p')
PLC_IP_ADDRESS = PERSIST['ip_address']
from device_base import deviceBase
import urllib
if PERSIST['download_pycomm']:
    try:
        urllib.urlretrieve('http://s3.amazonaws.com/pocloud-drivers/pycomm/clx.py', '/root/python_firmware/pycomm/ab_comm/clx.py')
        urllib.urlretrieve('http://s3.amazonaws.com/pocloud-drivers/pycomm/cip_base.py', '/root/python_firmware/pycomm/cip/cip_base.py')
        PERSIST['download_pycomm'] = False
        persistence.store(PERSIST, "extra_data.json")
    except Exception as e:
        print("Could not download latest pycomm update: {}".format(e))

from Channel import PLCChannel, ModbusChannel, read_tag, write_tag, TAG_DATAERROR_SLEEPTIME
from utilities import get_public_ip_address, get_private_ip_address, get_additional_tags, convert_int
from file_logger import filelogger as log
from Tags import tags
from runtimeStats import RuntimeStats as RTS

path = "/root/python_firmware/drivers/additional_tags.py"

f = open(path, "a+")
f.seek(0)
if os.stat(path).st_size == 0:
    f.write("from Channel import PLCChannel, ModbusChannel\n")
    f.write("from advvfdipp import PLC_IP_ADDRESS\n")
    f.write("additional_tags = []")
f.close()


from additional_tags import additional_tags

_ = None

log.info("advvfdipp startup")

# GLOBAL VARIABLES
WAIT_FOR_CONNECTION_SECONDS = 30
IP_CHECK_PERIOD = 60
WATCHDOG_ENABLE = False
WATCHDOG_CHECK_PERIOD = 60
WATCHDOG_SEND_PERIOD = 3600  # Seconds; the longest amount of time before sending the watchdog status

CHANNELS = tags + additional_tags


class start(threading.Thread, deviceBase):
    """Start class required by Meshify."""

    def __init__(self, name=None, number=None, mac=None, Q=None, mcu=None,
                 companyId=None, offset=None, mqtt=None, Nodes=None):
        """Initialize the driver."""
        threading.Thread.__init__(self)
        deviceBase.__init__(self, name=name, number=number, mac=mac, Q=Q,
                            mcu=mcu, companyId=companyId, offset=offset,
                            mqtt=mqtt, Nodes=Nodes)

        self.daemon = True
        self.version = "18"
        self.finished = threading.Event()
        self.force_send = False
        self.public_ip_address = ""
        self.public_ip_address_last_checked = 0
        self.watchdog = False
        self.watchdog_last_checked = 0
        self.watchdog_last_sent = 0
        self.ping_counter = 0
        self.rts = RTS()
        self.rts.loadDataFromFile()
        self.rts.saveDataToFile()
        self.rts.manageTime()
        self.today = dt.now().date()
        threading.Thread.start(self)

    # This is a required function for all drivers; its goal is to upload some piece of data
    # about your device so it can be seen on the web.
    def register(self):
        """Register the driver."""
        # self.sendtodb("log", "BOOM! Booted.", 0)
        pass

    def run(self):
        """Actually run the driver."""
        for i in range(0, WAIT_FOR_CONNECTION_SECONDS):
            print("advvfdipp driver will start in {} seconds".format(WAIT_FOR_CONNECTION_SECONDS - i))
            time.sleep(1)
        log.info("BOOM! Starting advvfdipp driver...")

        # self._check_watchdog()
        self._check_ip_address()

        self.nodes["advvfdipp_0199"] = self
        try:
            if PERSIST['flowmeter_units']:
                self.sendtodbDev(1, 'flowunits', PERSIST['flowmeter_units'], 0, 'advvfdipp')
            else:
                PERSIST['flowmeter_units'] = "GPM"
                persistence.store(PERSIST, "extra_data.json")
                self.sendtodbDev(1, 'flowunits', PERSIST['flowmeter_units'], 0, 'advvfdipp')
        except:
            PERSIST['flowmeter_units'] = "GPM"
            persistence.store(PERSIST, "extra_data.json")
            self.sendtodbDev(1, 'flowunits', PERSIST['flowmeter_units'], 0, 'advvfdipp')
        send_loops = 0
        convert_list = ["Device_Status_INT", "sts_PID_Control", "Downhole_Sensor_Status_INT", "alarm_Flowmeter", "alarm_IntakePressure",
                        "alarm_IntakeTemperature", "alarm_TubingPressure", "alarm_VFD", "alarm_Lockout", "alarm_FluidLevel", "Run_Permissive_INT",
                        "Start_Permissive_INT", "PowerFlex755.Val_LastFaultCode", "sts_CurrentVFDFaultCode"]
        while True:
            now = time.time()
            if self.force_send:
                log.warning("FORCE SEND: TRUE")
            if int(time.time()) % 600 == 0 or self.force_send:
                if self.force_send:
                    payload = {"ts": time.time()*1000, "values": {}}
                else:
                    payload = {"ts": round(time.time()/600)*600*1000, "values": {}}
                for chan in CHANNELS:
                    try:
                        val = chan.read()
                        if "hart" in chan.mesh_name and val is None:
                            val = 0.0

                        if chan.plc_tag in convert_list:
                            converted_value = convert_int(chan.plc_tag, val)
                            payload["values"][chan.mesh_name] = converted_value
                            if chan.mesh_name == "wellstatus":
                                if converted_value == "Running" and not self.rts.runs[self.rts.todayString]["run_" + str(self.rts.currentRun)]["start"]:
                                    self.rts.startRun()
                                    self.rts.saveDataToFile()
                                elif self.rts.runs[self.rts.todayString]["run_" + str(self.rts.currentRun)]["start"] and not self.rts.runs[self.rts.todayString]["run_" + str(self.rts.currentRun)]["end"]:
                                    self.rts.endRun()
                                    self.rts.saveDataToFile()
                                payload["values"]["percentRunTime30Days"] = self.rts.calculateRunPercentMultiDay()
                        elif chan.mesh_name == "vfdfrequency":
                            if val > 0:
                                self.rts.addHertzDataPoint(val)
                                self.rts.saveDataToFile()
                            payload["values"][chan.mesh_name] = val
                            payload["values"]["avgFrequency30Days"] = self.rts.calculateAverageHertzMultiDay()
                        else:
                            payload["values"][chan.mesh_name] = val
                    except Exception as e:
                        log.info("Error: {}".format(e))
                try:
                    self.rts.manageTime()
                    if dt.now().date() != self.today:
                        payload["values"]["avgFrequency30Days"] = self.rts.calculateAverageHertzMultiDay()
                        payload["values"]["percentRunTime30Days"] = self.rts.calculateRunPercentMultiDay()
                        self.today = dt.now().date()
                except Exception as e:
                    log.error("Error: {}".format(e))
                self._check_ip_address()
                payload["values"]["public_ip_address"] = self.public_ip_address
                payload["values"]["private_ip_address"] = self.private_ip_address
                self.sendToTB(json.dumps(payload))
                self.sendToTBAttributes(json.dumps({"latestReportTime": round(time.time()/600)*600*1000}))
            time.sleep(10)  # pause 10 seconds between poll checks
            # print("advvfdipp driver still alive...")
            if self.force_send:
                if send_loops > 2:
                    log.warning("Turning off force_send")
                    self.force_send = False
                    send_loops = 0
                else:
                    send_loops += 1

            if WATCHDOG_ENABLE:
                if (now - self.watchdog_last_checked) > WATCHDOG_CHECK_PERIOD:
                    self._check_watchdog()

            if (now - self.public_ip_address_last_checked) > IP_CHECK_PERIOD:
                self._check_ip_address()

    def _check_watchdog(self):
        """Check the watchdog and send to Meshify if changed or stale."""
        test_watchdog = self.advvfdipp_watchdog()
        now = time.time()
        self.watchdog_last_checked = now
        if test_watchdog != self.watchdog or (now - self.watchdog_last_sent) > WATCHDOG_SEND_PERIOD:
            self.sendtodbDev(1, 'watchdog', test_watchdog, 0, 'advvfdipp')
            self.watchdog = test_watchdog
            self.watchdog_last_sent = now

    def _check_ip_address(self):
        """Check the public IP address and send to Meshify if changed."""
        self.public_ip_address_last_checked = time.time()
        test_public_ip = get_public_ip_address()
        test_private_ip = get_private_ip_address()
        if not test_public_ip == self.public_ip_address and not test_public_ip == "0.0.0.0":
            # self.sendtodbDev(1, 'public_ip_address', test_public_ip, 0, 'tankalarms')
            self.public_ip_address = test_public_ip
        if not test_private_ip == self.private_ip_address:
            # self.sendtodbDev(1, 'private_ip_address', test_private_ip, 0, 'tankalarms')
            self.private_ip_address = test_private_ip
        hostname = "8.8.8.8"
        response = 1
        try:
            response = os.system("ping -c 1 " + hostname + " > /dev/null 2>&1")
        except Exception as e:
            print("Something went wrong in ping: {}".format(e))

        # ...and then check the response
        if response == 0:
            print(hostname, 'is up!')
            self.ping_counter = 0
        else:
            print(hostname, 'is down!')
            self.ping_counter += 1

        if self.ping_counter >= 3:
            print("Rebooting because no internet detected")
            os.system('reboot')

    def advvfdipp_watchdog(self):
        """Write a random integer to the PLC and check 1 second later that it has been decremented by 1."""
        randval = randint(0, 32767)
        write_tag(str(PLC_IP_ADDRESS), 'watchdog_INT', randval, plc_type="CLX")
        time.sleep(1)
        watchdog_val = read_tag(str(PLC_IP_ADDRESS), 'watchdog_INT', plc_type="CLX")
        try:
            return (randval - 1) == watchdog_val[0]
        except (KeyError, TypeError):
            return False

    def advvfdipp_sync(self, name, value):
        """Sync all data from the driver."""
        self.force_send = True
        # self.sendtodb("log", "synced", 0)
        return True

    def advvfdipp_writeplctag(self, name, value):
        """Write a value to the PLC."""
        new_val = json.loads(str(value).replace("'", '"'))
        tag_n = str(new_val['tag'])  # e.g. "cmd_Start"
        val_n = new_val['val']
        write_res = write_tag(str(PLC_IP_ADDRESS), tag_n, val_n, plc_type="CLX")
        print("Result of advvfdipp_writeplctag(self, {}, {}) = {}".format(name, value, write_res))
        if write_res is None:
            write_res = "Error writing to PLC..."
        return write_res

    def advvfdipp_flowunits(self, name, value):
        """Set the flowmeter units and persist them."""
        new_val = json.loads(str(value).replace("'", '"'))
        PERSIST['flowmeter_units'] = new_val
        persistence.store(PERSIST, "extra_data.json")
        self.sendtodbDev(1, 'flowunits', PERSIST['flowmeter_units'], 0, 'advvfdipp')
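The driver seeds /root/python_firmware/drivers/additional_tags.py with an empty list on first boot, so extra channels can be dropped in per site without touching Tags.py. A hypothetical example of what that file might contain (the tag name "val_CasingPressure" is made up for illustration):

from Channel import PLCChannel, ModbusChannel
from advvfdipp import PLC_IP_ADDRESS

additional_tags = [
    # args: PLC IP, mesh_name, PLC tag, data type, change threshold, guarantee seconds
    PLCChannel(PLC_IP_ADDRESS, "casingpressure", "val_CasingPressure", "REAL", 10, 3600, plc_type="CLX"),  # hypothetical tag
]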
15
meshifyDrivers/advvfdipp/config.txt
Normal file
15
meshifyDrivers/advvfdipp/config.txt
Normal file
@@ -0,0 +1,15 @@
{
    "files": {
        "file3": "file_logger.py",
        "file2": "Channel.py",
        "file1": "advvfdipp.py",
        "file6": "persistence.py",
        "file5": "utilities.py",
        "file4": "Tags.py",
        "file7": "runtimeStats.py"
    },
    "deviceName": "advvfdipp",
    "releaseVersion": "19",
    "driverFileName": "advvfdipp.py",
    "driverId": "0100"
}
205
meshifyDrivers/advvfdipp/device_base.py
Normal file
205
meshifyDrivers/advvfdipp/device_base.py
Normal file
@@ -0,0 +1,205 @@
import types
import traceback
import binascii
import threading
import time
import thread
import os
import struct
import sys
import textwrap


class deviceBase():

    def __init__(self, name=None, number=None, mac=None, Q=None, mcu=None, companyId=None, offset=None, mqtt=None, Nodes=None):
        self.offset = offset
        self.company = companyId
        self.name = name
        self.number = number
        self.q = Q
        self.deviceName = name + '_[' + mac + ':' + number[0:2] + ':' + number[2:] + ']!'
        self.chName = "M1" + '_[' + mac + ':'
        self.chName2 = '_[' + mac + ':'
        print 'device name is:'
        print self.deviceName
        mac2 = mac.replace(":", "")
        self.mac = mac2.upper()
        self.address = 1
        self.debug = True
        self.mcu = mcu
        self.firstRun = True
        self.mqtt = mqtt
        self.nodes = Nodes
        # local dictionary of derived nodes, e.g. localNodes["tank_0199"] = self
        self.localNodes = {}
        os.system("chmod 777 /root/reboot")
        os.system("echo nameserver 8.8.8.8 > /etc/resolv.conf")

    def sendtodbLoc(self, ch, channel, value, timestamp, deviceName, mac):

        # this will add your derived nodes to the master nodes list, allowing them to receive sets
        localNodesName = deviceName + "_" + str(ch) + "99"

        if not self.localNodes.has_key(localNodesName):
            self.localNodes[localNodesName] = True
            self.nodes[localNodesName] = self

        # make the tech name
        lst = textwrap.wrap(str(mac), width=2)
        tech = ""
        for i in range(len(lst)):
            tech += lst[i].lower() + ":"

        chName2 = '_[' + tech

        if int(ch) < 10:
            ch = "0" + str(int(ch))

        if len(ch) > 2:
            ch = ch[:-2]

        dname = deviceName + chName2 + str(ch) + ":98]!"

        if int(timestamp) == 0:
            timestamp = self.getTime()

        topic = 'meshify/db/%s/%s/%s/%s' % (self.company, mac, dname, channel)
        print topic
        msg = """[ { "value":"%s", "timestamp":"%s" } ]""" % (str(value), str(timestamp))
        print msg
        self.q.put([topic, msg, 0])

    def sendtodbDevJSON(self, ch, channel, value, timestamp, deviceName):

        if int(ch) < 10:
            ch = "0" + str(int(ch))
        dname = deviceName + self.chName2 + str(ch) + ":99]!"
        if int(timestamp) == 0:
            timestamp = self.getTime()

        topic = 'meshify/db/%s/%s/%s/%s' % (self.company, self.mac, dname, channel)
        print topic
        msg = """[ { "value":%s, "timestamp":"%s" } ]""" % (str(value), str(timestamp))
        print msg
        self.q.put([topic, msg, 0])

    def sendtodbLora(self, ch, channel, value, timestamp, deviceName):

        if ":" not in ch:
            ch = ch[0:2] + ":" + ch[2:4]

        # this will add your derived nodes to the master nodes list, allowing them to receive sets
        localNodesName = deviceName + "_" + str(ch).replace(':', "")

        if not self.localNodes.has_key(localNodesName):
            self.localNodes[localNodesName] = True
            self.nodes[localNodesName] = self

        dname = deviceName + self.chName2 + str(ch) + "]!"

        if int(timestamp) == 0:
            timestamp = self.getTime()

        topic = 'meshify/db/%s/%s/%s/%s' % (self.company, self.mac, dname, channel)
        print topic
        msg = """[ { "value":"%s", "timestamp":"%s" } ]""" % (str(value), str(timestamp))
        print msg
        self.q.put([topic, msg, 0])

    def sendtodbDev(self, ch, channel, value, timestamp, deviceName):

        # this will add your derived nodes to the master nodes list, allowing them to receive sets
        localNodesName = deviceName + "_" + str(ch) + "99"

        if not self.localNodes.has_key(localNodesName):
            self.localNodes[localNodesName] = True
            self.nodes[localNodesName] = self

        if int(ch) < 10:
            ch = "0" + str(int(ch))

        dname = deviceName + self.chName2 + str(ch) + ":99]!"

        if int(timestamp) == 0:
            timestamp = self.getTime()

        topic = 'meshify/db/%s/%s/%s/%s' % (self.company, self.mac, dname, channel)
        print topic
        msg = """[ { "value":"%s", "timestamp":"%s" } ]""" % (str(value), str(timestamp))
        print msg
        self.q.put([topic, msg, 0])

    def sendToTB(self, payload):
        topic = 'v1/devices/me/telemetry'
        print(topic, payload)
        self.q.put([topic, payload, 0])

    def sendToTBAttributes(self, payload):
        topic = 'v1/devices/me/attributes'
        print(topic, payload)
        self.q.put([topic, payload, 0])

    def sendtodbCH(self, ch, channel, value, timestamp):

        if int(ch) < 10:
            ch = "0" + str(ch)

        dname = self.chName + str(ch) + ":99]!"

        if int(timestamp) == 0:
            timestamp = self.getTime()

        topic = 'meshify/db/%s/%s/%s/%s' % (self.company, self.mac, dname, channel)
        print topic
        msg = """[ { "value":"%s", "timestamp":"%s" } ]""" % (str(value), str(timestamp))
        print msg
        self.q.put([topic, msg, 0])

    def sendtodb(self, channel, value, timestamp):

        if int(timestamp) == 0:
            timestamp = self.getTime()
        if timestamp < 1400499858:
            return
        else:
            timestamp = str(int(timestamp) + int(self.offset))

        topic = 'meshify/db/%s/%s/%s/%s' % (self.company, self.mac, self.deviceName, channel)
        print topic
        msg = """[ { "value":"%s", "timestamp":"%s" } ]""" % (str(value), str(timestamp))
        print msg
        self.q.put([topic, msg, 0])

    def sendtodbJSON(self, channel, value, timestamp):

        if int(timestamp) == 0:
            timestamp = self.getTime()
        if timestamp < 1400499858:
            return
        else:
            timestamp = str(int(timestamp) + int(self.offset))

        topic = 'meshify/db/%s/%s/%s/%s' % (self.company, self.mac, self.deviceName, channel)
        print topic
        msg = """[ { "value":%s, "timestamp":"%s" } ]""" % (str(value), str(timestamp))
        print msg
        self.q.put([topic, msg, 0])

    def getTime(self):
        return str(int(time.time() + int(self.offset)))
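For orientation, sendtodbDev publishes onto the legacy Meshify topic tree while sendToTB targets the ThingsBoard device MQTT API. Illustrative (made-up MAC, company id, and values) output for a single flowrate sample on each path:

# legacy Meshify path, via sendtodbDev(1, 'flowrate', 42.7, 0, 'advvfdipp'):
#   topic: meshify/db/123/AABBCCDDEEFF/advvfdipp_[aa:bb:cc:dd:ee:ff:01:99]!/flowrate
#   msg:   [ { "value":"42.7", "timestamp":"1718900000" } ]
# ThingsBoard path, via sendToTB(json.dumps(payload)):
#   topic: v1/devices/me/telemetry
#   msg:   {"ts": 1718900000000, "values": {"flowrate": 42.7}}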
14
meshifyDrivers/advvfdipp/driverConfig.json
Normal file
14
meshifyDrivers/advvfdipp/driverConfig.json
Normal file
@@ -0,0 +1,14 @@
{
    "name": "advvfdipp",
    "driverFilename": "advvfdipp.py",
    "driverId": "0000",
    "additionalDriverFiles": [
        "utilities.py",
        "persistence.py",
        "Channel.py",
        "logger.py",
        "Tags.py"
    ],
    "version": 1,
    "s3BucketName": "advvfdipp"
}
18
meshifyDrivers/advvfdipp/file_logger.py
Normal file
18
meshifyDrivers/advvfdipp/file_logger.py
Normal file
@@ -0,0 +1,18 @@
"""Logging setup for advvfdipp"""
import logging
from logging.handlers import RotatingFileHandler
import sys

log_formatter = logging.Formatter('%(asctime)s %(levelname)s %(funcName)s(%(lineno)d) %(message)s')
log_file = './advvfdipp.log'
my_handler = RotatingFileHandler(log_file, mode='a', maxBytes=500*1024,
                                 backupCount=2, encoding=None, delay=0)
my_handler.setFormatter(log_formatter)
my_handler.setLevel(logging.INFO)
filelogger = logging.getLogger('advvfdipp')
filelogger.setLevel(logging.INFO)
filelogger.addHandler(my_handler)

console_out = logging.StreamHandler(sys.stdout)
console_out.setFormatter(log_formatter)
filelogger.addHandler(console_out)
21
meshifyDrivers/advvfdipp/persistence.py
Normal file
21
meshifyDrivers/advvfdipp/persistence.py
Normal file
@@ -0,0 +1,21 @@
"""Data persistence functions."""
# if more advanced persistence is needed, use a sqlite database
import json


def load(filename="persist.json"):
    """Load persisted settings from the specified file."""
    try:
        with open(filename, 'r') as persist_file:
            return json.load(persist_file)
    except Exception:
        return False


def store(persist_obj, filename="persist.json"):
    """Store the persisting settings into the specified file."""
    try:
        with open(filename, 'w') as persist_file:
            return json.dump(persist_obj, persist_file, indent=4)
    except Exception:
        return False
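Usage is a simple load/store round trip. Note that load returns False rather than raising when the file is missing or unreadable, so callers must check for that sentinel (this is how advvfdipp.py uses it):

settings = load("extra_data.json")
if not settings:
    settings = {"ip_address": "192.168.1.10"}  # fall back to defaults
settings["ip_address"] = "192.168.1.20"
store(settings, "extra_data.json")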
172
meshifyDrivers/advvfdipp/runtimeStats.py
Normal file
172
meshifyDrivers/advvfdipp/runtimeStats.py
Normal file
@@ -0,0 +1,172 @@
from datetime import datetime as dt
import time
import json
import math


class RuntimeStats:

    def __init__(self):
        self.runs = {}
        self.currentRun = 0
        self.today = ""
        self.todayString = ""

    def manageTime(self):
        if self.todayString != dt.strftime(dt.today(), "%Y-%m-%d"):
            if self.runs[self.todayString]["run_" + str(self.currentRun)]["start"] and not self.runs[self.todayString]["run_" + str(self.currentRun)]["end"]:
                self.runs[self.todayString]["run_" + str(self.currentRun)]["end"] = time.mktime(dt.strptime(self.todayString + " 23:59:59", "%Y-%m-%d %H:%M:%S").timetuple())
            self.addDay()
        self.today = dt.today()
        self.todayString = dt.strftime(self.today, "%Y-%m-%d")
        days = list(self.runs.keys())
        days.sort()
        while (dt.strptime(days[-1], "%Y-%m-%d") - dt.strptime(days[0], "%Y-%m-%d")).days > 30:
            self.removeDay(day=days[0])
            days = list(self.runs.keys())
            days.sort()

    def addHertzDataPoint(self, frequency):
        if frequency > 0:
            self.manageTime()
            try:
                self.runs[self.todayString]["run_" + str(self.currentRun)]["frequencies"].append(frequency)
            except KeyError:
                self.runs[self.todayString]["run_" + str(self.currentRun)]["frequencies"] = [frequency]

    def startRun(self):
        if self.checkRunning():
            self.endRun()
        self.runs[self.todayString]["run_" + str(self.currentRun)]["start"] = time.time()

    def endRun(self):
        self.runs[self.todayString]["run_" + str(self.currentRun)]["end"] = time.time()
        self.currentRun += 1
        self.runs[self.todayString]["run_" + str(self.currentRun)] = {"start": 0, "end": 0, "frequencies": []}

    def checkRunning(self):
        if self.runs[self.todayString]["run_" + str(self.currentRun)]["start"] and not self.runs[self.todayString]["run_" + str(self.currentRun)]["end"]:
            return True
        return False

    def addDay(self):
        self.today = dt.today()
        self.todayString = dt.strftime(self.today, "%Y-%m-%d")
        self.currentRun = 1
        self.runs[self.todayString] = {}
        self.runs[self.todayString]["run_" + str(self.currentRun)] = {"start": 0, "end": 0, "frequencies": []}

    def countRunsDay(self, day=None):
        if not day:
            day = self.todayString
        return len(self.runs[day].keys())

    def countRunsMultiDay(self, numDays=30):
        total_runs = 0
        for day in list(self.runs.keys()):
            total_runs += self.countRunsDay(day=day)
        return total_runs

    def calculateAverageHertzDay(self, day=None, returnArray=False):
        dayFrequencies = []
        if not day:
            day = self.todayString
        for run in list(self.runs[day].keys()):
            try:
                dayFrequencies += self.runs[day][run]["frequencies"]
            except Exception as e:
                print("{} missing frequency data for {}".format(day, run))
        if returnArray:
            return dayFrequencies
        return round(math.fsum(dayFrequencies)/len(dayFrequencies), 2)

    def calculateAverageHertzMultiDay(self, numDays=30):
        self.manageTime()
        frequencies = []
        for day in list(self.runs.keys()):
            if not day == self.todayString and (dt.strptime(self.todayString, "%Y-%m-%d") - dt.strptime(day, "%Y-%m-%d")).days <= numDays:
                try:
                    frequencies += self.calculateAverageHertzDay(day=day, returnArray=True)
                except Exception as e:
                    print("{} missing frequency data".format(day))
        if len(frequencies):
            return round(math.fsum(frequencies)/len(frequencies), 2)
        return 0

    def calculateRunTimeDay(self, day=None, convertToHours=True):
        total_time = 0
        if not day:
            day = self.todayString
        for run in list(self.runs[day].keys()):
            total_time = self.runs[day][run]["end"] - self.runs[day][run]["start"] + total_time
        if convertToHours:
            return self.convertSecondstoHours(total_time)
        return total_time

    def calculateRunTimeMultiDay(self, numDays=30, convertToHours=True):
        total_time = 0
        for day in list(self.runs.keys()):
            if not day == self.todayString and (dt.strptime(self.todayString, "%Y-%m-%d") - dt.strptime(day, "%Y-%m-%d")).days <= numDays:
                total_time += self.calculateRunTimeDay(day=day, convertToHours=False)
        if convertToHours:
            return self.convertSecondstoHours(total_time)
        return total_time

    def calculateRunPercentDay(self, day=None, precise=False):
        if not day:
            day = self.todayString
        if precise:
            return (self.calculateRunTimeDay(day=day)/24) * 100
        return round((self.calculateRunTimeDay(day=day)/24) * 100, 2)

    def calculateRunPercentMultiDay(self, numDays=30, precise=False):
        self.manageTime()
        if precise:
            return (self.calculateRunTimeMultiDay()/(24*numDays)) * 100
        return round((self.calculateRunTimeMultiDay()/(24*numDays)) * 100, 2)

    def removeDay(self, day=None):
        if not day:
            raise Exception("Day can not be None")
        print("removing day {}".format(day))
        del self.runs[day]

    def convertSecondstoHours(self, seconds):
        return round(seconds / (60*60), 2)

    def loadDataFromFile(self, filePath="./runtimestats.json"):
        try:
            with open(filePath, "r") as f:
                temp = json.load(f)
            self.runs = temp["data"]
            self.currentRun = temp["current_run"]
            self.today = dt.strptime(temp["current_day"], "%Y-%m-%d")
            self.todayString = temp["current_day"]
            self.manageTime()
        except Exception:
            print("Could not find file at {}".format(filePath))
            print("creating file")
            self.addDay()
            try:
                with open(filePath, "w") as f:
                    d = {
                        "current_run": self.currentRun,
                        "current_day": self.todayString,
                        "data": self.runs
                    }
                    json.dump(d, f, indent=4)
            except Exception as e:
                print(e)

    def saveDataToFile(self, filePath="./runtimestats.json"):
        try:
            print("Saving Runs")
            with open(filePath, "w") as f:
                d = {
                    "current_run": self.currentRun,
                    "current_day": self.todayString,
                    "data": self.runs
                }
                json.dump(d, f, indent=4)
        except Exception as e:
            print(e)
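A minimal sketch of the intended call pattern, mirroring how advvfdipp.py drives this class (the frequency value is illustrative):

rts = RuntimeStats()
rts.loadDataFromFile()        # seeds today's bucket if the file is missing
rts.startRun()                # well transitions to "Running"
rts.addHertzDataPoint(47.5)   # sampled VFD frequency while running
rts.endRun()                  # well stops
print(rts.calculateRunPercentMultiDay(), rts.calculateAverageHertzMultiDay())
rts.saveDataToFile()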
240
meshifyDrivers/advvfdipp/utilities.py
Normal file
240
meshifyDrivers/advvfdipp/utilities.py
Normal file
@@ -0,0 +1,240 @@
"""Utility functions for the driver."""
import socket
import struct
from Channel import PLCChannel
import urllib
import contextlib


def get_private_ip_address():
    """Find the private IP Address of the host device."""
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    try:
        sock.connect(("8.8.8.8", 80))
    except Exception as e:
        return e
    ip_address = sock.getsockname()[0]
    sock.close()
    return ip_address


def get_public_ip_address():
    """Find the public IP address via checkip.amazonaws.com; return "0.0.0.0" on failure."""
    ip_address = "0.0.0.0"
    try:
        with contextlib.closing(urllib.urlopen("http://checkip.amazonaws.com")) as url:
            ip_address = url.read()
    except Exception as e:
        print("could not resolve check IP: {}".format(e))
        return ip_address
    return ip_address[:-1]  # strip the trailing newline


def int_to_float16(int_to_convert):
    """Convert integer into float16 representation."""
    bin_rep = ('0' * 16 + '{0:b}'.format(int_to_convert))[-16:]
    sign = 1.0
    if int(bin_rep[0]) == 1:
        sign = -1.0
    exponent = float(int(bin_rep[1:6], 2))
    fraction = float(int(bin_rep[6:16], 2))

    if exponent == float(0b00000):
        return sign * 2 ** -14 * fraction / (2.0 ** 10.0)
    elif exponent == float(0b11111):
        if fraction == 0:
            return sign * float("inf")
        return float("NaN")
    frac_part = 1.0 + fraction / (2.0 ** 10.0)
    return sign * (2 ** (exponent - 15)) * frac_part


def ints_to_float(int1, int2):
    """Convert 2 registers into a floating point number."""
    mypack = struct.pack('>HH', int1, int2)
    f_unpacked = struct.unpack('>f', mypack)
    print("[{}, {}] >> {}".format(int1, int2, f_unpacked[0]))
    return f_unpacked[0]


def degf_to_degc(temp_f):
    """Convert deg F to deg C."""
    return (temp_f - 32.0) * (5.0/9.0)


def degc_to_degf(temp_c):
    """Convert deg C to deg F."""
    return temp_c * 1.8 + 32.0


def get_additional_tags(tag_dict):
    """Build PLCChannel objects from the 'additional_tags' entries of a settings dict."""
    tags_array = tag_dict['additional_tags']
    channel_array = []
    for x in tags_array:
        try:
            print "Making channel {}".format(x)
            channel_array.append(PLCChannel(tag_dict['ip_address'], x['mesh_name'], x['plc_tag'], x['data_type'], x['chg_threshold'], x['guarantee_sec'], plc_type='CLX'))
        except Exception:
            print "Nothing to write or bad key"
    return channel_array


def convert_int(plc_tag, value):
    """Map an integer status/alarm code read from the PLC to its display string."""
    well_status_codes = {
        0: "Running",
        1: "Pumped Off",
        2: "Alarmed",
        3: "Locked Out",
        4: "Stopped"
    }

    pid_control_codes = {
        0: "Flow",
        1: "Fluid Level",
        2: "Tubing Pressure",
        3: "Manual"
    }

    downhole_codes = {
        0: "OK",
        1: "Connecting",
        2: "Open Circuit",
        3: "Shorted",
        4: "Cannot Decode"
    }

    permissive_codes = {
        0: "OK",
        1: "Flow",
        2: "Intake Pressure",
        3: "Intake Temperature",
        4: "Tubing Pressure",
        5: "VFD",
        6: "Fluid Level",
        7: "Min. Downtime"
    }

    alarm_codes = {
        0: "OK",
        1: "Alarm"
    }

    alarm_vfd_codes = {
        0: "OK",
        1: "Locked Out"
    }

    vfd_fault_codes = {
        0: "No Fault",
        2: "Auxiliary Input",
        3: "Power Loss",
        4: "UnderVoltage",
        5: "OverVoltage",
        7: "Motor Overload",
        8: "Heatsink OverTemp",
        9: "Thermister OverTemp",
        10: "Dynamic Brake OverTemp",
        12: "Hardware OverCurrent",
        13: "Ground Fault",
        14: "Ground Warning",
        15: "Load Loss",
        17: "Input Phase Loss",
        18: "Motor PTC Trip",
        19: "Task Overrun",
        20: "Torque Prove Speed Band",
        21: "Output Phase Loss",
        24: "Decel Inhibit",
        25: "OverSpeed Limit",
        26: "Brake Slipped",
        27: "Torque Prove Conflict",
        28: "TP Encls Conflict",
        29: "Analog In Loss",
        33: "Auto Restarts Exhausted",
        35: "IPM OverCurrent",
        36: "SW OverCurrent",
        38: "Phase U to Ground",
        39: "Phase V to Ground",
        40: "Phase W to Ground",
        41: "Phase UV Short",
        42: "Phase VW Short",
        43: "Phase WU Short",
        44: "Phase UNeg to Ground",
        45: "Phase VNeg to Ground",
        46: "Phase WNeg to Ground",
        48: "System Defaulted",
        49: "Drive Powerup",
        51: "Clear Fault Queue",
        55: "Control Board Overtemp",
        59: "Invalid Code",
        61: "Shear Pin 1",
        62: "Shear Pin 2",
        64: "Drive Overload",
        66: "OW Torque Level",
        67: "Pump Off",
        71: "Port 1 Adapter",
        72: "Port 2 Adapter",
        73: "Port 3 Adapter",
        74: "Port 4 Adapter",
        75: "Port 5 Adapter",
        76: "Port 6 Adapter",
        77: "IR Volts Range",
        78: "FluxAmps Ref Range",
        79: "Excessive Load",
        80: "AutoTune Aborted",
        81: "Port 1 DPI Loss",
        82: "Port 2 DPI Loss",
        83: "Port 3 DPI Loss",
        84: "Port 4 DPI Loss",
        85: "Port 5 DPI Loss",
        86: "Port 6 DPI Loss",
        87: "IXo Voltage Range",
        91: "Primary Velocity Feedback Loss",
        93: "Hardware Enable Check",
        94: "Alternate Velocity Feedback Loss",
        95: "Auxiliary Velocity Feedback Loss",
        96: "Position Feedback Loss",
        97: "Auto Tach Switch",
        100: "Parameter Checksum",
        101: "Power Down NVS Blank",
        102: "NVS Not Blank",
        103: "Power Down NVS Incompatible",
        104: "Power Board Checksum",
        106: "Incompat MCB-PB",
        107: "Replaced MCB-PB",
        108: "Analog Calibration Checksum",
        110: "Invalid Power Board Data",
        111: "Power Board Invalid ID",
        112: "Power Board App Min Version",
        113: "Tracking DataError",
        115: "Power Down Table Full",
        116: "Power Down Entry Too Large",
        117: "Power Down Data Checksum",
        118: "Power Board Power Down Checksum",
        124: "App ID Changed",
        125: "Using Backup App",
        134: "Start on Power Up",
        137: "External Precharge Error",
        138: "Precharge Open",
        141: "Autotune Enc Angle",
        142: "Autotune Speed Restricted",
        143: "Autotune Current Regulator",
        144: "Autotune Inertia",
        145: "Autotune Travel",
        13035: "Net IO Timeout",
        13037: "Net IO Timeout"
    }

    plc_tags = {
        "Device_Status_INT": well_status_codes.get(value, "Invalid Code"),
        "sts_PID_Control": pid_control_codes.get(value, "Invalid Code"),
        "Downhole_Sensor_Status_INT": downhole_codes.get(value, "Invalid Code"),
        "alarm_Flowmeter": alarm_codes.get(value, "Invalid Code"),
        "alarm_IntakePressure": alarm_codes.get(value, "Invalid Code"),
        "alarm_IntakeTemperature": alarm_codes.get(value, "Invalid Code"),
        "alarm_TubingPressure": alarm_codes.get(value, "Invalid Code"),
        "alarm_VFD": alarm_codes.get(value, "Invalid Code"),
        "alarm_Lockout": alarm_vfd_codes.get(value, "Invalid Code"),
        "alarm_FluidLevel": alarm_codes.get(value, "Invalid Code"),
        "Run_Permissive_INT": permissive_codes.get(value, "Invalid Code"),
        "Start_Permissive_INT": permissive_codes.get(value, "Invalid Code"),
        "PowerFlex755.Val_LastFaultCode": vfd_fault_codes.get(value, "Invalid Code"),
        "sts_CurrentVFDFaultCode": vfd_fault_codes.get(value, "Invalid Code")
    }

    return plc_tags.get(plc_tag, "Invalid Tag")
@@ -144,6 +144,11 @@ class deviceBase():
        print(topic, payload)
        self.q.put([topic, payload, 0])

    def sendToTBAttributes(self, payload):
        topic = 'v1/devices/me/attributes'
        print(topic, payload)
        self.q.put([topic, payload, 0])

    def sendtodbCH(self, ch, channel, value, timestamp):
615
meshifyDrivers/piflow/Channel.py
Normal file
615
meshifyDrivers/piflow/Channel.py
Normal file
@@ -0,0 +1,615 @@
|
||||
"""Define Meshify channel class."""
|
||||
import time
|
||||
from pycomm.ab_comm.clx import Driver as ClxDriver
|
||||
from pycomm.cip.cip_base import CommError, DataError
|
||||
from file_logger import filelogger as log
|
||||
import minimalmodbus
|
||||
|
||||
minimalmodbus.BAUDRATE = 9600
|
||||
minimalmodbus.STOPBITS = 1
|
||||
|
||||
TAG_DATAERROR_SLEEPTIME = 5
|
||||
|
||||
def binarray(intval):
|
||||
"""Split an integer into its bits."""
|
||||
bin_string = '{0:08b}'.format(intval)
|
||||
bin_arr = [i for i in bin_string]
|
||||
bin_arr.reverse()
|
||||
return bin_arr
|
||||
|
||||
|
||||
def read_tag(addr, tag, plc_type="CLX"):
|
||||
"""Read a tag from the PLC."""
|
||||
direct = plc_type == "Micro800"
|
||||
clx = ClxDriver()
|
||||
try:
|
||||
if clx.open(addr, direct_connection=direct):
|
||||
try:
|
||||
val = clx.read_tag(tag)
|
||||
clx.close()
|
||||
return val
|
||||
except DataError as err:
|
||||
clx.close()
|
||||
time.sleep(TAG_DATAERROR_SLEEPTIME)
|
||||
log.error("Data Error during readTag({}, {}): {}".format(addr, tag, err))
|
||||
except CommError:
|
||||
# err = c.get_status()
|
||||
clx.close()
|
||||
log.error("Could not connect during readTag({}, {})".format(addr, tag))
|
||||
except AttributeError as err:
|
||||
clx.close()
|
||||
log.error("AttributeError during readTag({}, {}): \n{}".format(addr, tag, err))
|
||||
clx.close()
|
||||
return False
|
||||
|
||||
|
||||
def read_array(addr, tag, start, end, plc_type="CLX"):
|
||||
"""Read an array from the PLC."""
|
||||
direct = plc_type == "Micro800"
|
||||
clx = ClxDriver()
|
||||
if clx.open(addr, direct_connection=direct):
|
||||
arr_vals = []
|
||||
try:
|
||||
for i in range(start, end):
|
||||
tag_w_index = tag + "[{}]".format(i)
|
||||
val = clx.read_tag(tag_w_index)
|
||||
arr_vals.append(round(val[0], 4))
|
||||
if arr_vals:
|
||||
clx.close()
|
||||
return arr_vals
|
||||
else:
|
||||
log.error("No length for {}".format(addr))
|
||||
clx.close()
|
||||
return False
|
||||
except Exception:
|
||||
log.error("Error during readArray({}, {}, {}, {})".format(addr, tag, start, end))
|
||||
err = clx.get_status()
|
||||
clx.close()
|
||||
log.error(err)
|
||||
clx.close()
|
||||
|
||||
|
||||
def write_tag(addr, tag, val, plc_type="CLX"):
|
||||
"""Write a tag value to the PLC."""
|
||||
direct = plc_type == "Micro800"
|
||||
clx = ClxDriver()
|
||||
try:
|
||||
if clx.open(addr, direct_connection=direct):
|
||||
try:
|
||||
initial_val = clx.read_tag(tag)
|
||||
write_status = clx.write_tag(tag, val, initial_val[1])
|
||||
clx.close()
|
||||
return write_status
|
||||
except DataError as err:
|
||||
clx_err = clx.get_status()
|
||||
clx.close()
|
||||
log.error("--\nDataError during writeTag({}, {}, {}, plc_type={}) -- {}\n{}\n".format(addr, tag, val, plc_type, err, clx_err))
|
||||
|
||||
except CommError as err:
|
||||
clx_err = clx.get_status()
|
||||
log.error("--\nCommError during write_tag({}, {}, {}, plc_type={})\n{}\n--".format(addr, tag, val, plc_type, err))
|
||||
clx.close()
|
||||
return False
|
||||
|
||||
|
||||
class Channel(object):
|
||||
"""Holds the configuration for a Meshify channel."""
|
||||
|
||||
def __init__(self, mesh_name, data_type, chg_threshold, guarantee_sec, map_=False, write_enabled=False):
|
||||
"""Initialize the channel."""
|
||||
self.mesh_name = mesh_name
|
||||
self.data_type = data_type
|
||||
self.last_value = None
|
||||
self.value = None
|
||||
self.last_send_time = 0
|
||||
self.chg_threshold = chg_threshold
|
||||
self.guarantee_sec = guarantee_sec
|
||||
self.map_ = map_
|
||||
self.write_enabled = write_enabled
|
||||
|
||||
def __str__(self):
|
||||
"""Create a string for the channel."""
|
||||
return "{}\nvalue: {}, last_send_time: {}".format(self.mesh_name, self.value, self.last_send_time)
|
||||
|
||||
def check(self, new_value, force_send=False):
|
||||
"""Check to see if the new_value needs to be stored."""
|
||||
send_needed = False
|
||||
send_reason = ""
|
||||
if self.data_type == 'BOOL' or self.data_type == 'STRING' or type(new_value) == str:
|
||||
if self.last_send_time == 0:
|
||||
send_needed = True
|
||||
send_reason = "no send time"
|
||||
elif self.value is None:
|
||||
send_needed = True
|
||||
send_reason = "no value"
|
||||
elif self.value != new_value:
|
||||
if self.map_:
|
||||
if not self.value == self.map_[new_value]:
|
||||
send_needed = True
|
||||
send_reason = "value change"
|
||||
else:
|
||||
send_needed = True
|
||||
send_reason = "value change"
|
||||
elif (time.time() - self.last_send_time) > self.guarantee_sec:
|
||||
send_needed = True
|
||||
send_reason = "guarantee sec"
|
||||
elif force_send:
|
||||
send_needed = True
|
||||
send_reason = "forced"
|
||||
else:
|
||||
if self.last_send_time == 0:
|
||||
send_needed = True
|
||||
send_reason = "no send time"
|
||||
elif self.value is None:
|
||||
send_needed = True
|
||||
send_reason = "no value"
|
||||
elif abs(self.value - new_value) > self.chg_threshold:
|
||||
send_needed = True
|
||||
send_reason = "change threshold"
|
||||
elif (time.time() - self.last_send_time) > self.guarantee_sec:
|
||||
send_needed = True
|
||||
send_reason = "guarantee sec"
|
||||
elif force_send:
|
||||
send_needed = True
|
||||
send_reason = "forced"
|
||||
if send_needed:
|
||||
self.last_value = self.value
|
||||
if self.map_:
|
||||
try:
|
||||
self.value = self.map_[new_value]
|
||||
except KeyError:
|
||||
log.error("Cannot find a map value for {} in {} for {}".format(new_value, self.map_, self.mesh_name))
|
||||
self.value = new_value
|
||||
else:
|
||||
self.value = new_value
|
||||
self.last_send_time = time.time()
|
||||
log.info("Sending {} for {} - {}".format(self.value, self.mesh_name, send_reason))
|
||||
return send_needed
|
||||
|
||||
def read(self):
|
||||
"""Read the value."""
|
||||
pass
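# Minimal usage sketch (assumed values): Channel.check implements
# report-by-exception -- a reading is stored when it changes by more than
# chg_threshold, or when guarantee_sec has elapsed since the last send.
#
#   level = Channel("fluid_level", "REAL", chg_threshold=5, guarantee_sec=3600)
#   level.check(100.0)   # True  -- first reading ("no send time")
#   level.check(102.0)   # False -- delta 2 is under the threshold of 5
#   level.check(110.0)   # True  -- delta 10 exceeds the threshold ("value change")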
|
||||
|
||||
|
||||
def identity(sent):
|
||||
"""Return exactly what was sent to it."""
|
||||
return sent
|
||||
|
||||
def volume_units(vunit):
|
||||
units = {
|
||||
0: "cm cubed/s",
|
||||
1: "cm cubed/min",
|
||||
2: "cm cubed/h",
|
||||
3: "cm cubed/d",
|
||||
4: "dm cubed/s",
|
||||
5: "dm cubed/min",
|
||||
6: "dm cubed/h",
|
||||
7: "dm cubed/d",
|
||||
8: "m cubed/s",
|
||||
9: "m cubed/min",
|
||||
10: "m cubed/h",
|
||||
11: "m cubed/d",
|
||||
12: "ml/s",
|
||||
13: "ml/min",
|
||||
14: "ml/h",
|
||||
15: "ml/d",
|
||||
16: "l/s",
|
||||
17: "l/min",
|
||||
18: "l/h (+)",
|
||||
19: "l/d",
|
||||
20: "hl/s",
|
||||
21: "hl/min",
|
||||
22: "hl/h",
|
||||
23: "hl/d",
|
||||
24: "Ml/s",
|
||||
25: "Ml/min",
|
||||
26: "Ml/h",
|
||||
27: "Ml/d",
|
||||
32: "af/s",
|
||||
33: "af/min",
|
||||
34: "af/h",
|
||||
35: "af/d",
|
||||
36: "ft cubed/s",
|
||||
37: "ft cubed/min",
|
||||
38: "ft cubed/h",
|
||||
39: "ft cubed/d",
|
||||
40: "fl oz/s (us)",
|
||||
41: "fl oz/min (us)",
|
||||
42: "fl oz/h (us)",
|
||||
43: "fl oz/d (us)",
|
||||
44: "gal/s (us)",
|
||||
45: "gal/min (us)",
|
||||
46: "gal/h (us)",
|
||||
47: "gal/d (us)",
|
||||
48: "Mgal/s (us)",
|
||||
49: "Mgal/min (us)",
|
||||
50: "Mgal/h (us)",
|
||||
51: "Mgal/d (us)",
|
||||
52: "bbl/s (us;liq.)",
|
||||
53: "bbl/min (us;liq.)",
|
||||
54: "bbl/h (us;liq.)",
|
||||
55: "bbl/d (us;liq.)",
|
||||
56: "bbl/s (us;beer)",
|
||||
57: "bbl/min (us;beer)",
|
||||
58: "bbl/h (us;beer)",
|
||||
59: "bbl/d (us;beer)",
|
||||
60: "bbl/s (us;oil)",
|
||||
61: "bbl/min (us;oil)",
|
||||
62: "bbl/h (us;oil)",
|
||||
63: "bbl/d (us;oil)",
|
||||
64: "bbl/s (us;tank)",
|
||||
65: "bbl/min (us;tank)",
|
||||
66: "bbl/h (us;tank)",
|
||||
67: "bbl/d (us;tank)",
|
||||
68: "gal/s (imp)",
|
||||
69: "gal/min (imp)",
|
||||
70: "gal/h (imp)",
|
||||
71: "gal/d (imp)",
|
||||
72: "Mgal/s (imp)",
|
||||
73: "Mgal/min (imp)",
|
||||
74: "Mgal/h (imp)",
|
||||
75: "Mgal/d (imp)",
|
||||
76: "bbl/s (imp;beer)",
|
||||
77: "bbl/min (imp;beer)",
|
||||
78: "bbl/h (imp;beer)",
|
||||
79: "bbl/d (imp;beer)",
|
||||
80: "bbl/s (imp;oil)",
|
||||
81: "bbl/min (imp;oil)",
|
||||
82: "bbl/h (imp;oil)",
|
||||
83: "bbl/d (imp;oil)",
|
||||
88: "kgal/s (us)",
|
||||
89: "kgal/min (us)",
|
||||
90: "kgal/h (us)",
|
||||
91: "kgal/d (us)",
|
||||
92: "MMft cubed/s",
|
||||
93: "MMft cubed/min",
|
||||
94: "MMft cubed/h",
|
||||
96: "Mft cubed/d"
|
||||
}
|
||||
return units[vunit]
|
||||
|
||||
def totalizer_units(tunit):
|
||||
|
||||
units = {
|
||||
0: "cm cubed",
|
||||
1: "dm cubed",
|
||||
2: "m cubed",
|
||||
3: "ml",
|
||||
4: "l",
|
||||
5: "hl",
|
||||
6: "Ml Mega",
|
||||
8: "af",
|
||||
9: "ft cubed",
|
||||
10: "fl oz (us)",
|
||||
11: "gal (us)",
|
||||
12: "Mgal (us)",
|
||||
13: "bbl (us;liq.)",
|
||||
14: "bbl (us;beer)",
|
||||
15: "bbl (us;oil)",
|
||||
16: "bbl (us;tank)",
|
||||
17: "gal (imp)",
|
||||
18: "Mgal (imp)",
|
||||
19: "bbl (imp;beer)",
|
||||
20: "bbl (imp;oil)",
|
||||
22: "kgal (us)",
|
||||
23: "Mft cubed",
|
||||
50: "g",
|
||||
51: "kg",
|
||||
52: "t",
|
||||
53: "oz",
|
||||
54: "lb",
|
||||
55: "STon",
|
||||
100: "Nl",
|
||||
101: "Nm cubed",
|
||||
102: "Sm cubed",
|
||||
103: "Sft cubed",
|
||||
104: "Sl",
|
||||
105: "Sgal (us)",
|
||||
106: "Sbbl (us;liq.)",
|
||||
107: "Sgal (imp)",
|
||||
108: "Sbbl (us;oil)",
|
||||
109: "MMSft cubed",
|
||||
110: "Nhl",
|
||||
251: "None"
|
||||
}
|
||||
return units[tunit]
|
||||
|
||||
def int_to_bits(n,x):
|
||||
return pad_to_x([int(digit) for digit in bin(n)[2:]],x) # [2:] to chop off the "0b" part
|
||||
|
||||
def pad_to_x(n,x):
|
||||
while len(n) < x:
|
||||
n = [0] + n
|
||||
return n
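# Worked example: int_to_bits returns the big-endian bit vector of n,
# left-padded with zeros to x entries.
#
#   int_to_bits(5, 8)    # -> [0, 0, 0, 0, 0, 1, 0, 1]
#   pad_to_x([1, 0], 4)  # -> [0, 0, 1, 0]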
|
||||
|
||||
def status_codes(n):
|
||||
|
||||
status_array = int_to_bits(n,16)
|
||||
status_low = {
|
||||
0: "Stopped;",
|
||||
1: "Operating in Forward;",
|
||||
2: "Operating in Reverse;",
|
||||
3: "DC operating;"
|
||||
}
|
||||
status_mid = {
|
||||
0: "",
|
||||
1: "Speed searching;",
|
||||
2: "Accelerating;",
|
||||
3: "At constant speed;",
|
||||
4: "Decelerating;",
|
||||
5: "Decelerating to stop;",
|
||||
6: "H/W OCS;",
|
||||
7: "S/W OCS;",
|
||||
8: "Dwell operating;"
|
||||
}
|
||||
status_high = {
|
||||
0: "Normal state",
|
||||
4: "Warning occurred",
|
||||
8: "Fault occurred"
|
||||
}
|
||||
values = {
|
||||
0: 8,
|
||||
1: 4,
|
||||
2: 2,
|
||||
3: 1
|
||||
}
|
||||
|
||||
stats_low = status_array[12:]
|
||||
stats_mid = status_array[8:12]
|
||||
stats_high = status_array[:4]
|
||||
low = 0
|
||||
mid = 0
|
||||
high = 0
|
||||
for x in range(4):
|
||||
if stats_low[x] == 1:
|
||||
low = low + values[x]
|
||||
if stats_mid[x] == 1:
|
||||
mid = mid + values[x]
|
||||
if stats_high[x] == 1:
|
||||
high = high + values[x]
|
||||
|
||||
return status_low[low] + " " + status_mid[mid] + ' ' + status_high[high]
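# Worked example: the 16-bit drive status word is split into nibbles and each
# nibble is looked up in its table (double spaces come from empty entries).
#
#   status_codes(2)       # -> "Operating in Reverse;  Normal state"
#   status_codes(0x8001)  # -> "Operating in Forward;  Fault occurred"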
|
||||
|
||||
def fault_code_a(n):
|
||||
|
||||
fault_code_array = int_to_bits(n,16)
|
||||
|
||||
""" fault = {
|
||||
0: "OCT",
|
||||
1: "OVT",
|
||||
2: "EXT-A",
|
||||
3: "EST",
|
||||
4: "COL",
|
||||
5: "GFT",
|
||||
6: "OHT",
|
||||
7: "ETH",
|
||||
8: "OLT",
|
||||
9: "Reserved",
|
||||
10: "EXT-B",
|
||||
11: "EEP",
|
||||
12: "FAN",
|
||||
13: "POT",
|
||||
14: "IOLT",
|
||||
15: "LVT"
|
||||
} """
|
||||
fault = {
|
||||
0: "Overload Trip",
|
||||
1: "Underload Trip",
|
||||
2: "Inverter Overload Trip",
|
||||
3: "E-Thermal Trip",
|
||||
4: "Ground Fault Trip",
|
||||
5: "Output Image Trip",
|
||||
6: "Inmput Imaging Trip",
|
||||
7: "Reserved",
|
||||
8: "Reserved",
|
||||
9: "NTC Trip",
|
||||
10: "Overcurrent Trip",
|
||||
11: "Overvoltage Trip",
|
||||
12: "External Trip",
|
||||
13: "Arm Short",
|
||||
14: "Over Heat Trip",
|
||||
15: "Fuse Open Trip"
|
||||
}
|
||||
|
||||
faults = []
|
||||
counter = 15
|
||||
for x in range(16):
|
||||
if fault_code_array[x] == 1:
|
||||
faults = [fault[counter]] + faults
|
||||
counter = counter - 1
|
||||
return ' '.join(faults)
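# Worked example: each set bit in the fault word contributes one trip
# description, with bit 0 as the least-significant bit.
#
#   fault_code_a(0b11)     # -> "Overload Trip Underload Trip"
#   fault_code_a(1 << 10)  # -> "Overcurrent Trip"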
|
||||
|
||||
def fault_code_b(n):
|
||||
|
||||
fault_code_array = int_to_bits(n,8)
|
||||
|
||||
""" fault = {
|
||||
0: "COM",
|
||||
1: "Reserved",
|
||||
2: "NTC",
|
||||
3: "REEP",
|
||||
4: "OC2",
|
||||
5: "NBR",
|
||||
6: "SAFA",
|
||||
7: "SAFB"
|
||||
} """
|
||||
fault = {
|
||||
0: "Reserved",
|
||||
1: "Reserved",
|
||||
2: "Reserved",
|
||||
3: "FAN Trip",
|
||||
4: "Reserved",
|
||||
5: "Reserved",
|
||||
6: "Pre PID Fail",
|
||||
7: "Bad contact at basic I/O board",
|
||||
8: "External Brake Trip",
|
||||
9: "No Motor Trip",
|
||||
10: "Bad Option Card",
|
||||
11: "Reserved",
|
||||
12: "Reserved",
|
||||
13: "Reserved",
|
||||
14: "Pre Over Heat Trip",
|
||||
15: "Reserved"
|
||||
}
|
||||
|
||||
faults = []
|
||||
counter = 7
|
||||
for x in range(8):
|
||||
if fault_code_array[x] == 1:
|
||||
faults = [fault[counter]] + faults
|
||||
counter = counter - 1
|
||||
return ' '.join(faults)
|
||||
|
||||
class ModbusChannel(Channel):
|
||||
"""Modbus channel object."""
|
||||
|
||||
def __init__(self, mesh_name, register_number, data_type, chg_threshold, guarantee_sec, channel_size=1, map_=False, write_enabled=False, transform_fn=identity, unit_number=1, scaling=0):
|
||||
"""Initialize the channel."""
|
||||
super(ModbusChannel, self).__init__(mesh_name, data_type, chg_threshold, guarantee_sec, map_, write_enabled)
|
||||
self.mesh_name = mesh_name
|
||||
self.register_number = register_number
|
||||
self.channel_size = channel_size
|
||||
self.data_type = data_type
|
||||
self.last_value = None
|
||||
self.value = None
|
||||
self.last_send_time = 0
|
||||
self.chg_threshold = chg_threshold
|
||||
self.guarantee_sec = guarantee_sec
|
||||
self.map_ = map_
|
||||
self.write_enabled = write_enabled
|
||||
self.transform_fn = transform_fn
|
||||
self.unit_number = unit_number
|
||||
self.instrument = minimalmodbus.Instrument('/dev/ttyS0', self.unit_number)
|
||||
self.scaling = scaling
|
||||
|
||||
def read(self):
|
||||
"""Return the transformed read value."""
|
||||
if self.data_type == "FLOAT":
|
||||
try:
|
||||
read_value = self.instrument.read_float(self.register_number,4,self.channel_size)
|
||||
except IOError as e:
|
||||
log.info(e)
|
||||
return None
|
||||
|
||||
elif self.data_type == "INTEGER" or self.data_type == "STRING":
|
||||
try:
|
||||
read_value = self.instrument.read_register(self.register_number, self.scaling, 4)
|
||||
except IOError as e:
|
||||
log.info(e)
|
||||
return None
|
||||
read_value = self.transform_fn(read_value)
|
||||
return read_value
|
||||
|
||||
def write(self, value):
|
||||
"""Write a value to a register"""
|
||||
if self.data_type == "FLOAT":
|
||||
value = float(value)
|
||||
elif self.data_type == "INTEGER":
|
||||
value = int(value)
|
||||
else:
|
||||
value = str(value)
|
||||
try:
|
||||
self.instrument.write_register(self.register_number,value, self.scaling, 16 if self.channel_size > 1 else 6 )
|
||||
return True
|
||||
except Exception as e:
|
||||
log.info("Failed to write value: {}".format(e))
|
||||
return False
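# Illustrative sketch (register and threshold values taken from Tags.py;
# the unit number is assumed): a ModbusChannel pairs one holding register
# with the report-by-exception check inherited from Channel.
#
#   flow = ModbusChannel('volume_flow', 3873, 'FLOAT', 10, 3600,
#                        channel_size=2, unit_number=247)
#   val = flow.read()                  # None on a serial IOError
#   if val is not None and flow.check(val):
#       print("store {}".format(flow.value))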
|
||||
|
||||
|
||||
class PLCChannel(Channel):
|
||||
"""PLC Channel Object."""
|
||||
|
||||
def __init__(self, ip, mesh_name, plc_tag, data_type, chg_threshold, guarantee_sec, map_=False, write_enabled=False, plc_type='CLX'):
|
||||
"""Initialize the channel."""
|
||||
super(PLCChannel, self).__init__(mesh_name, data_type, chg_threshold, guarantee_sec, map_, write_enabled)
|
||||
self.plc_ip = ip
|
||||
self.mesh_name = mesh_name
|
||||
self.plc_tag = plc_tag
|
||||
self.data_type = data_type
|
||||
self.last_value = None
|
||||
self.value = None
|
||||
self.last_send_time = 0
|
||||
self.chg_threshold = chg_threshold
|
||||
self.guarantee_sec = guarantee_sec
|
||||
self.map_ = map_
|
||||
self.write_enabled = write_enabled
|
||||
self.plc_type = plc_type
|
||||
|
||||
def read(self):
|
||||
"""Read the value."""
|
||||
plc_value = None
|
||||
if self.plc_tag and self.plc_ip:
|
||||
read_value = read_tag(self.plc_ip, self.plc_tag, plc_type=self.plc_type)
|
||||
if read_value:
|
||||
plc_value = read_value[0]
|
||||
|
||||
return plc_value
|
||||
|
||||
|
||||
class BoolArrayChannels(Channel):
|
||||
"""Hold the configuration for a set of boolean array channels."""
|
||||
|
||||
def __init__(self, ip, mesh_name, plc_tag, data_type, chg_threshold, guarantee_sec, map_=False, write_enabled=False):
|
||||
"""Initialize the channel."""
|
||||
super(BoolArrayChannels, self).__init__(mesh_name, data_type, chg_threshold, guarantee_sec, map_, write_enabled)
|
||||
self.plc_ip = ip
|
||||
self.mesh_name = mesh_name
|
||||
self.plc_tag = plc_tag
|
||||
self.data_type = data_type
|
||||
self.last_value = None
|
||||
self.value = None
|
||||
self.last_send_time = 0
|
||||
self.chg_threshold = chg_threshold
|
||||
self.guarantee_sec = guarantee_sec
|
||||
self.map_ = map_
|
||||
self.write_enabled = write_enabled
|
||||
|
||||
def compare_values(self, new_val_dict):
|
||||
"""Compare new values to old values to see if the values need storing."""
|
||||
send = False
|
||||
for idx in new_val_dict:
|
||||
try:
|
||||
if new_val_dict[idx] != self.last_value[idx]:
|
||||
send = True
|
||||
except KeyError:
|
||||
log.error("Key Error in self.compare_values for index {}".format(idx))
|
||||
send = True
|
||||
return send
|
||||
|
||||
def read(self, force_send=False):
|
||||
"""Read the value and check to see if needs to be stored."""
|
||||
send_needed = False
|
||||
send_reason = ""
|
||||
if self.plc_tag:
|
||||
val = read_tag(self.plc_ip, self.plc_tag)
|
||||
if val:
|
||||
bool_arr = binarray(val[0])
|
||||
new_val = {}
|
||||
for idx in self.map_:
|
||||
try:
|
||||
new_val[self.map_[idx]] = bool_arr[idx]
|
||||
except KeyError:
|
||||
log.error("Not able to get value for index {}".format(idx))
|
||||
|
||||
if self.last_send_time == 0:
|
||||
send_needed = True
|
||||
send_reason = "no send time"
|
||||
elif self.value is None:
|
||||
send_needed = True
|
||||
send_reason = "no value"
|
||||
elif self.compare_values(new_val):
|
||||
send_needed = True
|
||||
send_reason = "value change"
|
||||
elif (time.time() - self.last_send_time) > self.guarantee_sec:
|
||||
send_needed = True
|
||||
send_reason = "guarantee sec"
|
||||
elif force_send:
|
||||
send_needed = True
|
||||
send_reason = "forced"
|
||||
|
||||
if send_needed:
|
||||
self.value = new_val
|
||||
self.last_value = self.value
|
||||
self.last_send_time = time.time()
|
||||
log.info("Sending {} for {} - {}".format(self.value, self.mesh_name, send_reason))
|
||||
return send_needed
|
||||
563
meshifyDrivers/piflow/PiFlow.py
Normal file
@@ -0,0 +1,563 @@
|
||||
"""Driver for PiFlow"""
|
||||
import os
|
||||
import threading
|
||||
import json
|
||||
import time
|
||||
from random import randint
|
||||
from datetime import datetime as dt
|
||||
from device_base import deviceBase
|
||||
import persistence
|
||||
from utilities import get_public_ip_address, get_private_ip_address
|
||||
from file_logger import filelogger as log
|
||||
"""import RPi.GPIO as GPIO
|
||||
|
||||
Relay_Ch1 = 26
|
||||
Relay_Ch2 = 20
|
||||
Relay_Ch3 = 21
|
||||
|
||||
GPIO.setwarnings(False)
|
||||
GPIO.setmode(GPIO.BCM)
|
||||
|
||||
GPIO.setup(Relay_Ch1,GPIO.OUT)
|
||||
GPIO.output(Relay_Ch1, GPIO.HIGH)
|
||||
GPIO.setup(Relay_Ch2,GPIO.OUT)
|
||||
GPIO.output(Relay_Ch2, GPIO.HIGH)
|
||||
GPIO.setup(Relay_Ch3,GPIO.OUT)
|
||||
GPIO.output(Relay_Ch3, GPIO.HIGH)
|
||||
"""
|
||||
_ = None
|
||||
os.system('sudo timedatectl set-timezone America/Chicago')
|
||||
log.info("PiFlow startup")
|
||||
|
||||
# GLOBAL VARIABLES
|
||||
WAIT_FOR_CONNECTION_SECONDS = 5
|
||||
IP_CHECK_PERIOD = 60
|
||||
|
||||
|
||||
# PERSISTENCE FILE
|
||||
PERSIST = persistence.load('persist.json')
|
||||
if not PERSIST:
|
||||
PERSIST = {
|
||||
'flowmeter': 247,
|
||||
'drive': 1,
|
||||
'isVFD': False,
|
||||
'drive_enabled': True,
|
||||
'state': False,
|
||||
'state_timer': 0,
|
||||
'plc_ip': '192.168.1.12',
|
||||
'yesterday_totalizer_1': dt.today().day,
|
||||
'yesterday_totalizer_2': dt.today().day,
|
||||
'yesterday_totalizer_3': dt.today().day,
|
||||
'yesterday_total_totalizer_1': 0,
|
||||
'yesterday_total_midnight_totalizer_1': 0,
|
||||
'yesterday_total_totalizer_2': 0,
|
||||
'yesterday_total_midnight_totalizer_2': 0,
|
||||
'yesterday_total_totalizer_3': 0,
|
||||
'yesterday_total_midnight_totalizer_3': 0
|
||||
}
|
||||
persistence.store(PERSIST, 'persist.json')
|
||||
"""
|
||||
try:
|
||||
if time.time() - PERSIST['state_timer'] >= 60:
|
||||
GPIO.output(Relay_Ch1, GPIO.HIGH)
|
||||
PERSIST['state'] = False
|
||||
persistence.store(PERSIST, "persist.json")
|
||||
elif PERSIST['state']:
|
||||
GPIO.output(Relay_Ch1, GPIO.LOW)
|
||||
else:
|
||||
GPIO.output(Relay_Ch1, GPIO.HIGH)
|
||||
except:
|
||||
PERSIST['state'] = False
|
||||
PERSIST['state_timer'] = time.time()
|
||||
persistence.store(PERSIST, "persist.json")
|
||||
"""
|
||||
drive_enabled = PERSIST['drive_enabled']
|
||||
try:
|
||||
isVFD = PERSIST['isVFD']
|
||||
except:
|
||||
PERSIST['isVFD'] = False
|
||||
isVFD = PERSIST['isVFD']
|
||||
persistence.store(PERSIST)
|
||||
|
||||
try:
|
||||
plc_ip = PERSIST['plc_ip']
|
||||
except:
|
||||
PERSIST['plc_ip'] = '192.168.1.12'
|
||||
plc_ip = PERSIST['plc_ip']
|
||||
persistence.store(PERSIST)
|
||||
|
||||
from Tags import tags
|
||||
|
||||
CHANNELS = tags
|
||||
from runtimeStats import RuntimeStats as RTS
|
||||
|
||||
class start(threading.Thread, deviceBase):
|
||||
"""Start class required by Meshify."""
|
||||
|
||||
def __init__(self, name=None, number=None, mac=None, Q=None, mcu=None,
|
||||
companyId=None, offset=None, mqtt=None, Nodes=None):
|
||||
"""Initialize the driver."""
|
||||
threading.Thread.__init__(self)
|
||||
deviceBase.__init__(self, name=name, number=number, mac=mac, Q=Q,
|
||||
mcu=mcu, companyId=companyId, offset=offset,
|
||||
mqtt=mqtt, Nodes=Nodes)
|
||||
|
||||
self.daemon = True
|
||||
self.version = "28"
|
||||
self.finished = threading.Event()
|
||||
self.force_send = False
|
||||
self.public_ip_address = ""
|
||||
self.private_ip_address = ""
|
||||
self.public_ip_address_last_checked = 0
|
||||
self.status = ""
|
||||
self.alarm = ""
|
||||
self.rts = RTS()
|
||||
self.rts.loadDataFromFile()
|
||||
self.rts.saveDataToFile()
|
||||
|
||||
threading.Thread.start(self)
|
||||
|
||||
# this is a required function for all drivers, its goal is to upload some piece of data
|
||||
# about your device so it can be seen on the web
|
||||
def register(self):
|
||||
"""Register the driver."""
|
||||
# self.sendtodb("log", "BOOM! Booted.", 0)
|
||||
pass
|
||||
|
||||
def run(self):
|
||||
"""Actually run the driver."""
|
||||
for i in range(0, WAIT_FOR_CONNECTION_SECONDS):
|
||||
print("PiFlow driver will start in {} seconds".format(WAIT_FOR_CONNECTION_SECONDS - i))
|
||||
time.sleep(1)
|
||||
log.info("BOOM! Starting PiFlow driver...")
|
||||
|
||||
#self._check_watchdog()
|
||||
self._check_ip_address()
|
||||
|
||||
self.nodes["PiFlow_0199"] = self
|
||||
|
||||
send_loops = 0
|
||||
|
||||
while True:
|
||||
now = time.time()
|
||||
if self.force_send:
|
||||
log.warning("FORCE SEND: TRUE")
|
||||
if isVFD:
|
||||
status = {}
|
||||
for chan in CHANNELS[:24]: #build status/alarm strings
|
||||
try:
|
||||
val = chan.read()
|
||||
chan.check(val, self.force_send)
|
||||
status[chan.mesh_name] = chan.value
|
||||
except Exception as e:
|
||||
log.warning("An error occured in status check: {}".format(e))
|
||||
try:
|
||||
self.sendStatus(status)
|
||||
except Exception as e:
|
||||
log.warning("An error occured in send status: {}".format(e))
|
||||
for chan in CHANNELS[24:]:
|
||||
try:
|
||||
val = chan.read()
|
||||
if chan.mesh_name in ['totalizer_1','totalizer_2','totalizer_3']:
|
||||
right_now = dt.today()
|
||||
today_total, yesterday_total = self.totalize(val, PERSIST['yesterday_'+chan.mesh_name], right_now.day, right_now.hour, right_now.minute, PERSIST['yesterday_total_midnight_'+chan.mesh_name], PERSIST['yesterday_total_'+chan.mesh_name], chan.mesh_name)
|
||||
if chan.check(val, self.force_send):
|
||||
self.sendtodbDev(1, chan.mesh_name, chan.value, 0, 'PiFlow')
|
||||
self.sendtodbDev(1,"today_"+chan.mesh_name, today_total,0,'PiFlow')
|
||||
self.sendtodbDev(1,"yesterday_"+chan.mesh_name, yesterday_total,0,'PiFlow')
|
||||
self.sendtodbDev(1, chan.mesh_name + "_units", "BBL",0,'PiFlow')
|
||||
elif chan.mesh_name == "frequency":
|
||||
if val > 0:
|
||||
self.rts.addHertzDataPoint(val)
|
||||
self.rts.saveDataToFile()
|
||||
if chan.check(val, self.force_send):
|
||||
self.sendtodbDev(1, chan.mesh_name, chan.value, 0, 'PiFlow')
|
||||
self.sendtodbDev(1, "avgFrequency30Days", self.rts.calculateAverageHertzMultiDay(),0,'PiFlow')
|
||||
else:
|
||||
if chan.check(val, self.force_send):
|
||||
self.sendtodbDev(1, chan.mesh_name, chan.value, 0, 'PiFlow')
|
||||
except Exception as e:
|
||||
log.warning("An error occured in data collection: {}".format(e))
|
||||
else:
|
||||
for chan in CHANNELS:
|
||||
try:
|
||||
if chan.mesh_name == "remote_start":
|
||||
val = PERSIST["state"]
|
||||
else:
|
||||
val = None
|
||||
for _ in range(3):
|
||||
temp = chan.read()
|
||||
if temp is not None:
|
||||
val = temp
|
||||
if val is None:
|
||||
log.info("No modbus data sending previous value")
|
||||
val = chan.value
|
||||
if chan.mesh_name in ['totalizer_1','totalizer_2','totalizer_3']:
|
||||
right_now = dt.today()
|
||||
today_total, yesterday_total = self.totalize(val, PERSIST['yesterday_'+chan.mesh_name], right_now.day, right_now.hour, right_now.minute, PERSIST['yesterday_total_midnight_'+chan.mesh_name], PERSIST['yesterday_total_'+chan.mesh_name], chan.mesh_name)
|
||||
if chan.check(val, self.force_send):
|
||||
self.sendtodbDev(1, chan.mesh_name, chan.value, 0, 'PiFlow')
|
||||
self.sendtodbDev(1,"today_"+chan.mesh_name, today_total,0,'PiFlow')
|
||||
self.sendtodbDev(1,"yesterday_"+chan.mesh_name, yesterday_total,0,'PiFlow')
|
||||
elif chan.mesh_name == "volume_flow" and not PERSIST['drive_enabled']:
|
||||
if chan.check(val, self.force_send):
|
||||
self.sendtodbDev(1, chan.mesh_name, chan.value, 0, 'PiFlow')
|
||||
if chan.value > 0:
|
||||
self.sendtodbDev(1, "run_status", "Running", 0, 'PiFlow')
|
||||
if not self.rts.runs[self.rts.todayString]["run_" + str(self.rts.currentRun)]["start"]:
|
||||
self.rts.startRun()
|
||||
self.rts.saveDataToFile()
|
||||
else:
|
||||
self.sendtodbDev(1,"run_status", "Stopped", 0, 'PiFlow')
|
||||
if self.rts.runs[self.rts.todayString]["run_" + str(self.rts.currentRun)]["start"] and not self.rts.runs[self.rts.todayString]["run_" + str(self.rts.currentRun)]["end"]:
|
||||
self.rts.endRun()
|
||||
self.rts.saveDataToFile()
|
||||
|
||||
self.sendtodbDev(1, "percentRunTime30Days", self.rts.calculateRunPercentMultiDay(), 0,'PiFlow')
|
||||
elif chan.mesh_name == "run_status":
|
||||
if "Operating" in val and not self.rts.runs[self.rts.todayString]["run_" + str(self.rts.currentRun)]["start"]:
|
||||
self.rts.startRun()
|
||||
self.rts.saveDataToFile()
|
||||
elif "Stopped" in val and self.rts.runs[self.rts.todayString]["run_" + str(self.rts.currentRun)]["start"] and not self.rts.runs[self.rts.todayString]["run_" + str(self.rts.currentRun)]["end"]:
|
||||
self.rts.endRun()
|
||||
self.rts.saveDataToFile()
|
||||
if chan.check(val, self.force_send):
|
||||
self.sendtodbDev(1, chan.mesh_name, chan.value, 0, 'PiFlow')
|
||||
self.sendtodbDev(1, "percentRunTime30Days", self.rts.calculateRunPercentMultiDay(), 0,'PiFlow')
|
||||
elif chan.mesh_name == "frequency":
|
||||
if val > 0:
|
||||
self.rts.addHertzDataPoint(val)
|
||||
self.rts.saveDataToFile()
|
||||
if chan.check(val, self.force_send):
|
||||
self.sendtodbDev(1, chan.mesh_name, chan.value, 0, 'PiFlow')
|
||||
self.sendtodbDev(1, "avgFrequency30Days", self.rts.calculateAverageHertzMultiDay(),0,'PiFlow')
|
||||
elif chan.mesh_name == "remote_start":
|
||||
if chan.check(val, self.force_send):
|
||||
self.sendtodbDev(1, chan.mesh_name, chan.value, 0, 'PiFlow')
|
||||
PERSIST["state_timer"] = time.time()
|
||||
persistence.store(PERSIST, "persist.json")
|
||||
else:
|
||||
if chan.check(val, self.force_send):
|
||||
self.sendtodbDev(1, chan.mesh_name, chan.value, 0, 'PiFlow')
|
||||
|
||||
except Exception as e:
|
||||
log.warning("An error occured: {}".format(e))
|
||||
time.sleep(3)
|
||||
|
||||
|
||||
# print("PiFlow driver still alive...")
|
||||
if self.force_send:
|
||||
if send_loops > 2:
|
||||
log.warning("Turning off force_send")
|
||||
self.force_send = False
|
||||
send_loops = 0
|
||||
else:
|
||||
send_loops += 1
|
||||
|
||||
|
||||
if (now - self.public_ip_address_last_checked) > IP_CHECK_PERIOD:
|
||||
self._check_ip_address()
|
||||
time.sleep(10)
|
||||
|
||||
def _check_ip_address(self):
|
||||
"""Check the public IP address and send to Meshify if changed."""
|
||||
self.public_ip_address_last_checked = time.time()
|
||||
test_public_ip = get_public_ip_address()
|
||||
test_public_ip = test_public_ip[:-1]
|
||||
test_private_ip = get_private_ip_address()
|
||||
if test_public_ip != self.public_ip_address and test_public_ip != "0.0.0.0":
|
||||
self.sendtodbDev(1, 'public_ip_address', test_public_ip, 0, 'PiFlow')
|
||||
self.public_ip_address = test_public_ip
|
||||
if test_private_ip != self.private_ip_address:
|
||||
self.sendtodbDev(1, 'private_ip_address', test_private_ip, 0, 'PiFlow')
|
||||
self.private_ip_address = test_private_ip
|
||||
|
||||
def PiFlow_sync(self, name, value):
|
||||
"""Sync all data from the driver."""
|
||||
self.force_send = True
|
||||
# self.sendtodb("log", "synced", 0)
|
||||
return True
|
||||
|
||||
def PiFlow_flowmeternumber(self, name, unit_number):
|
||||
"""Change the unit number for the PiFlow flow meter"""
|
||||
unit_number = int(unit_number)
|
||||
if drive_enabled:
|
||||
for chan in CHANNELS[0:8]:
|
||||
chan.unit_number = unit_number
|
||||
PERSIST['flowmeter'] = unit_number
|
||||
persistence.store(PERSIST, 'persist.json')
|
||||
return True
|
||||
else:
|
||||
for chan in CHANNELS:
|
||||
chan.unit_number = unit_number
|
||||
PERSIST['flowmeter'] = unit_number
|
||||
persistence.store(PERSIST, 'persist.json')
|
||||
self.sendtodbDev(1, 'flowmeternumber', unit_number, 0,'PiFlow')
|
||||
return True
|
||||
return False
|
||||
|
||||
def PiFlow_drivenumber(self, name, unit_number):
|
||||
"""Change the unit number for the PiFlow drive"""
|
||||
unit_number = int(unit_number)
|
||||
for chan in CHANNELS[8:]:
|
||||
chan.unit_number = unit_number
|
||||
|
||||
PERSIST['drive'] = unit_number
|
||||
persistence.store(PERSIST, 'persist.json')
|
||||
self.sendtodbDev(1, 'drivenumber', unit_number, 0,'PiFlow')
|
||||
return True
|
||||
|
||||
def PiFlow_reboot(self, name, value):
|
||||
os.system('reboot')
|
||||
return True
|
||||
|
||||
def PiFlow_drive_enabled(self, name, value):
|
||||
value = int(value)
|
||||
if value == 1:
|
||||
PERSIST['drive_enabled'] = True
|
||||
else:
|
||||
PERSIST['drive_enabled'] = False
|
||||
|
||||
persistence.store(PERSIST, 'persist.json')
|
||||
self.sendtodbDev(1, 'drive_enabled', value, 0,'PiFlow')
|
||||
return True
|
||||
|
||||
def PiFlow_write(self, name, value):
|
||||
"""Write a value to the device via modbus"""
|
||||
new_val = json.loads(str(value).replace("'", '"'))
|
||||
addr_n = int(new_val['addr'])
|
||||
reg_n = int(new_val['reg'])
|
||||
val_n = new_val['val']
|
||||
for chan in CHANNELS:
|
||||
if chan.unit_number == addr_n and chan.register_number == reg_n:
|
||||
write_res = chan.write(val_n)
|
||||
|
||||
log.info("Result of PiFlow_write(self, {}, {}) = {}".format(name, value, write_res))
|
||||
return write_res
|
||||
"""
|
||||
def PiFlow_start(self, name, value):
|
||||
if isVFD:
|
||||
#do something with the plc
|
||||
log.info("Sending START signal to PLC")
|
||||
else:
|
||||
log.info("Sending START signal to Drive via relay {}".format(Relay_Ch1))
|
||||
GPIO.output(Relay_Ch1,GPIO.LOW)
|
||||
PERSIST["state"] = True
|
||||
PERSIST["state_timer"] = time.time()
|
||||
persistence.store(PERSIST,"persist.json")
|
||||
|
||||
return True
|
||||
|
||||
def PiFlow_stop(self, name, value):
|
||||
if isVFD:
|
||||
log.info("Sending STOP signal to PLC")
|
||||
#do something with the plc
|
||||
else:
|
||||
log.info("Sending STOP signal to Drive")
|
||||
GPIO.output(Relay_Ch1,GPIO.HIGH)
|
||||
PERSIST["state"] = False
|
||||
PERSIST["state_timer"] = time.time()
|
||||
persistence.store(PERSIST, "persist.json")
|
||||
return True
|
||||
"""
|
||||
def totalize(self,val, yesterday, day, hour, minute, yesterday_total_midnight, yesterday_total,channel):
|
||||
if (yesterday_total == 0 and yesterday_total_midnight == 0) or (yesterday_total is None or yesterday_total_midnight is None):
|
||||
yesterday_total_midnight = val
|
||||
PERSIST['yesterday_total_midnight_'+channel] = yesterday_total_midnight
|
||||
persistence.store(PERSIST, 'persist.json')
|
||||
today_total = val - yesterday_total_midnight
|
||||
if hour == 0 and minute == 0 and not(day == yesterday):
|
||||
self.rts.manageTime()
|
||||
yesterday_total = today_total
|
||||
yesterday_total_midnight = val
|
||||
today_total = val - yesterday_total_midnight
|
||||
yesterday = day
|
||||
PERSIST['yesterday_'+channel] = yesterday
|
||||
PERSIST['yesterday_total_'+channel] = yesterday_total
|
||||
PERSIST['yesterday_total_midnight_'+channel] = yesterday_total_midnight
|
||||
persistence.store(PERSIST,'persist.json')
|
||||
|
||||
return today_total,yesterday_total
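# Worked example: with a raw meter total of 1500 and a midnight snapshot of
# 1200, today_total = 1500 - 1200 = 300. On the first poll after midnight of
# a new day, yesterday_total becomes that 300, the midnight snapshot resets
# to the current raw total, and today_total starts again from 0.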
|
||||
|
||||
def sendStatus(self,status):
|
||||
status_string = ""
|
||||
|
||||
fault_codes = {
|
||||
0: "",
|
||||
2: "Auxiliary Input",
|
||||
3: "Power Loss",
|
||||
4: "UnderVoltage",
|
||||
5: "OverVoltage",
|
||||
7: "Motor Overload",
|
||||
8: "Heatsink OvrTemp",
|
||||
9: "Thermister OvrTemp",
|
||||
10: "DynBrake OverTemp",
|
||||
12: "HW OverCurrent",
|
||||
13: "Ground Fault",
|
||||
14: "Ground Warning",
|
||||
15: "Load Loss",
|
||||
17: "Input Phase Loss",
|
||||
18: "Motor PTC Trip",
|
||||
19: "Task Overrun",
|
||||
20: "TorqPrv Spd Band",
|
||||
21: "Output PhaseLoss",
|
||||
24: "Decel Inhibit",
|
||||
25: "OverSpeed Limit",
|
||||
26: "Brake Slipped",
|
||||
27: "Torq Prove Cflct",
|
||||
28: "TP Encls Config",
|
||||
29: "Analog In Loss",
|
||||
33: "AuRsts Exhausted",
|
||||
35: "IPM OverCurrent",
|
||||
36: "SW OverCurrent",
|
||||
38: "Phase U to Grnd",
|
||||
39: "Phase V to Grnd",
|
||||
40: "Phase W to Grnd",
|
||||
41: "Phase UV Short",
|
||||
42: "Phase VW Short",
|
||||
43: "Phase WU Short",
|
||||
44: "Phase UNegToGrnd",
|
||||
45: "Phase VNegToGrnd",
|
||||
46: "Phase WNegToGrnd",
|
||||
48: "System Defaulted",
|
||||
49: "Drive Powerup",
|
||||
51: "Clr Fault Queue",
|
||||
55: "Ctrl Bd Overtemp",
|
||||
59: "Invalid Code",
|
||||
61: "Shear Pin 1",
|
||||
62: "Shear Pin 2",
|
||||
64: "Drive Overload",
|
||||
67: "Pump Off",
|
||||
71: "Port 1 Adapter",
|
||||
72: "Port 2 Adapter",
|
||||
73: "Port 3 Adapter",
|
||||
74: "Port 4 Adapter",
|
||||
75: "Port 5 Adapter",
|
||||
76: "Port 6 Adapter",
|
||||
77: "IR Volts Range",
|
||||
78: "FluxAmpsRef Rang",
|
||||
79: "Excessive Load",
|
||||
80: "AutoTune Aborted",
|
||||
81: "Port 1 DPI Loss",
|
||||
82: "Port 2 DPI Loss",
|
||||
83: "Port 3 DPI Loss",
|
||||
84: "Port 4 DPI Loss",
|
||||
85: "Port 5 DPI Loss",
|
||||
86: "Port 6 DPI Loss",
|
||||
87: "Ixo VoltageRange",
|
||||
91: "Pri VelFdbk Loss",
|
||||
93: "Hw Enable Check",
|
||||
94: "Alt VelFdbk Loss",
|
||||
95: "Aux VelFdbk Loss",
|
||||
96: "PositionFdbkLoss",
|
||||
97: "Auto Tach Switch",
|
||||
100: "Parameter Chksum",
|
||||
101: "PwrDn NVS Blank",
|
||||
102: "NVS Not Blank",
|
||||
103: "PwrDn Nvs Incomp",
|
||||
104: "Pwr Brd Checksum",
|
||||
106: "Incompat MCB-PB",
|
||||
107: "Replaced MCB-PB",
|
||||
108: "Anlg Cal Chksum",
|
||||
110: "Ivld Pwr Bd Data",
|
||||
111: "PwrBd Invalid ID",
|
||||
112: "PwrBd App MinVer",
|
||||
113: "Tracking DataErr",
|
||||
115: "PwrDn Table Full",
|
||||
116: "PwrDnEntry2Large",
|
||||
117: "PwrDn Data Chksm",
|
||||
118: "PwrBd PwrDn Chks",
|
||||
124: "App ID Changed",
|
||||
125: "Using Backup App",
|
||||
134: "Start on PowerUp",
|
||||
137: "Ext Prechrg Err",
|
||||
138: "Precharge Open",
|
||||
141: "Autn Enc Angle",
|
||||
142: "Autn Spd Rstrct",
|
||||
143: "AutoTune CurReg",
|
||||
144: "AutoTune Inertia",
|
||||
145: "AutoTune Travel",
|
||||
13037: "Net IO Timeout"
|
||||
}
|
||||
|
||||
if status['vfd_active'] == "Stopped":
|
||||
status_string = status_string + status['vfd_active'] + "; " + status['vfd_ready']
|
||||
else:
|
||||
status_string = status_string + status['vfd_active']
|
||||
if status['vfd_rev']:
|
||||
status_string = status_string + '; ' + status['vfd_rev']
|
||||
if status['vfd_fwd']:
|
||||
status_string = status_string + '; ' + status['vfd_fwd']
|
||||
if status['vfd_atreference']:
|
||||
status_string = status_string + '; ' + status['vfd_atreference']
|
||||
alarm_string = ""
|
||||
if status['vfd_faulted'] == "Drive Faulted":
|
||||
status_string = status_string + '; ' + status['vfd_faulted']
|
||||
if status['vfd_commloss']:
|
||||
alarm_string = alarm_string + '; ' + status['vfd_commloss']
|
||||
if status['vfd_fbkalarm']:
|
||||
alarm_string = alarm_string + '; ' + status['vfd_fbkalarm']
|
||||
if status['vfd_faultcode']:
|
||||
alarm_string = alarm_string + '; ' + "Fault: {} Fault code: {}".format(fault_codes[status['vfd_faultcode']],str(status['vfd_faultcode']))
|
||||
if status['minspeedalarm']:
|
||||
alarm_string = alarm_string + '; ' + status['minspeedalarm']
|
||||
if status['pumpedoff']:
|
||||
alarm_string = alarm_string + '; ' + status['pumpedoff']
|
||||
if status['lockedout']:
|
||||
alarm_string = alarm_string + '; ' + status['lockedout']
|
||||
if status['tubingpressurehi']:
|
||||
alarm_string = alarm_string + '; ' + status['tubingpressurehi']
|
||||
if status['tubingpressurehihi']:
|
||||
alarm_string = alarm_string + '; ' + status['tubingpressurehihi']
|
||||
if status['tubingpressurelo']:
|
||||
alarm_string = alarm_string + '; ' + status['tubingpressurelo']
|
||||
if status['tubingpressurelolo']:
|
||||
alarm_string = alarm_string + '; ' + status['tubingpressurelolo']
|
||||
if status['flowmeterhihi']:
|
||||
alarm_string = alarm_string + '; ' + status['flowmeterhihi']
|
||||
if status['flowmeterhi']:
|
||||
alarm_string = alarm_string + '; ' + status['flowmeterhi']
|
||||
if status['flowmeterlolo']:
|
||||
alarm_string = alarm_string + '; ' + status['flowmeterlolo']
|
||||
if status['flowmeterlo']:
|
||||
alarm_string = alarm_string + '; ' + status['flowmeterlo']
|
||||
if status['fluidlevellolo']:
|
||||
alarm_string = alarm_string + '; ' + status['fluidlevellolo']
|
||||
if status['fluidlevello']:
|
||||
alarm_string = alarm_string + '; ' + status['fluidlevello']
|
||||
if status['fluidlevelhi']:
|
||||
alarm_string = alarm_string + '; ' + status['fluidlevelhi']
|
||||
if status['fluidlevelhihi']:
|
||||
alarm_string = alarm_string + '; ' + status['fluidlevelhihi']
|
||||
try:
|
||||
if status_string.startswith('; '):
status_string = status_string[2:]
if status_string.endswith('; '):
status_string = status_string[:-2]
if alarm_string.startswith('; '):
alarm_string = alarm_string[2:]
if alarm_string.endswith('; '):
alarm_string = alarm_string[:-2]
|
||||
except Exception as e:
|
||||
log.warning("Error in send status semicolon: {}".format(e))
|
||||
|
||||
|
||||
if self.status != status_string:
|
||||
self.status = status_string
|
||||
log.info("Sending {} for {}".format(status_string, 'run_status'))
|
||||
self.sendtodbDev(1, 'run_status', status_string, 0, 'PiFlow')
|
||||
if "Operating" in status_string and not self.rts.runs[self.rts.todayString]["run_" + str(self.rts.currentRun)]["start"]:
|
||||
self.rts.startRun()
|
||||
self.rts.saveDataToFile()
|
||||
elif "Stopped" in status_string and self.rts.runs[self.rts.todayString]["run_" + str(self.rts.currentRun)]["start"] and not self.rts.runs[self.rts.todayString]["run_" + str(self.rts.currentRun)]["end"]:
|
||||
self.rts.endRun()
|
||||
self.rts.saveDataToFile()
|
||||
self.sendtodbDev(1, "percentRunTime30Days", self.rts.calculateRunPercentMultiDay(), 0,'PiFlow')
|
||||
if self.alarm != alarm_string:
|
||||
self.alarm = alarm_string
|
||||
log.info("Sending {} for {}".format(alarm_string, 'fault_a'))
|
||||
self.sendtodbDev(1, 'fault_a', alarm_string, 0 , 'PiFlow')
|
||||
|
||||
|
||||
|
||||
|
||||
92
meshifyDrivers/piflow/Tags.py
Normal file
@@ -0,0 +1,92 @@
|
||||
from Channel import PLCChannel,Channel, ModbusChannel, status_codes, fault_code_a, fault_code_b, volume_units, totalizer_units
|
||||
import persistence
|
||||
|
||||
PERSIST = persistence.load('persist.json')
|
||||
flowmeter_unit_number = PERSIST['flowmeter']
|
||||
drive_enabled = PERSIST['drive_enabled']
|
||||
isVFD = PERSIST['isVFD']
|
||||
if drive_enabled:
|
||||
drive_unit_number = PERSIST['drive']
|
||||
try:
|
||||
plc_ip = PERSIST['plc_ip']
|
||||
except:
|
||||
PERSIST['plc_ip'] = '192.168.1.12'
|
||||
persistence.store(PERSIST)
|
||||
if isVFD:
|
||||
tags = [
|
||||
PLCChannel(plc_ip,'vfd_atreference','sts_VFD_AtReference','BOOL',0,3600,map_={0: "", 1: "At speed"},plc_type='Micro800'),
|
||||
PLCChannel(plc_ip,'vfd_rev','sts_VFD_REV','BOOL',0,3600,map_={0: "", 1: "Operating in Reverse"},plc_type='Micro800'),
|
||||
PLCChannel(plc_ip,'vfd_fwd','sts_VFD_FWD','BOOL',0,3600,map_={0: "", 1: "Operating in Forward"},plc_type='Micro800'),
|
||||
PLCChannel(plc_ip,'vfd_active','sts_VFD_Active','BOOL',0,3600,map_={0: "Stopped", 1: "Running"},plc_type='Micro800'),
|
||||
PLCChannel(plc_ip,'vfd_ready','sts_VFD_Ready','BOOL',0,3600,map_={0: "Drive Not Ready", 1: "Drive Ready"},plc_type='Micro800'),
|
||||
PLCChannel(plc_ip,'vfd_faultcode','sts_VFD_FaultCode','REAL',0,3600, plc_type='Micro800'),
|
||||
PLCChannel(plc_ip,'vfd_faulted','AL0_VFD','BOOL',0,3600,map_={0: "", 1: "Drive Faulted"},plc_type='Micro800'),
|
||||
PLCChannel(plc_ip,'vfd_commloss','AL0_VFDComLoss','BOOL',0,3600,map_={0: "", 1: "Drive Comms Loss"},plc_type='Micro800'),
|
||||
PLCChannel(plc_ip,'vfd_fbkalarm','AL0_VFD_FBAlarm','BOOL',0,3600,map_={0: "", 1: "Drive Lost Feedback"},plc_type='Micro800'),
|
||||
PLCChannel(plc_ip,'tubingpressurehi','AL0_TubingPressureHi','BOOL',0,3600,map_={0: "", 1: "High Tubing Pressure"},plc_type='Micro800'),
|
||||
PLCChannel(plc_ip,'tubingpressurehihi','AL0_TubingPressureHiHi','BOOL',0,3600,map_={0: "", 1: "High High Tubing Pressure"},plc_type='Micro800'),
|
||||
PLCChannel(plc_ip,'tubingpressurelo','AL0_TubingPressureLo','BOOL',0,3600,map_={0: "", 1: "Low Tubing Pressure"},plc_type='Micro800'),
|
||||
PLCChannel(plc_ip,'tubingpressurelolo','AL0_TubingPressureLoLo','BOOL',0,3600,map_={0: "", 1: "Low Low Tubing Pressure"},plc_type='Micro800'),
|
||||
PLCChannel(plc_ip,'flowmeterhihi','AL0_FlowMeterHiHi','BOOL',0,3600,map_={0: "", 1: "High High FM Flow Rate"},plc_type='Micro800'),
|
||||
PLCChannel(plc_ip,'flowmeterhi','AL0_FlowMeterHi','BOOL',0,3600,map_={0: "", 1: "High FM Flow Rate"},plc_type='Micro800'),
|
||||
PLCChannel(plc_ip,'flowmeterlolo','AL0_FlowMeterLoLo','BOOL',0,3600,map_={0: "", 1: "Low Low FM Flow Rate"},plc_type='Micro800'),
|
||||
PLCChannel(plc_ip,'flowmeterlo','AL0_FlowMeterLo','BOOL',0,3600,map_={0: "", 1: "Low FM Flow Rate"},plc_type='Micro800'),
|
||||
PLCChannel(plc_ip,'minspeedalarm','AL0_MinSpeedAlarm','BOOL',0,3600,map_={0: "", 1: "Drive not able to maintain min speed"},plc_type='Micro800'),
|
||||
PLCChannel(plc_ip,'pumpedoff','AL0_PumpedOff','BOOL',0,3600,map_={0: "", 1: "Pumped Off"},plc_type='Micro800'),
|
||||
PLCChannel(plc_ip,'fluidlevellolo','AL0_FluidLevelLoLo','BOOL',0,3600,map_={0: "", 1: "Low Low Fluid Level"},plc_type='Micro800'),
|
||||
PLCChannel(plc_ip,'fluidlevello','AL0_FluidLevelLo','BOOL',0,3600,map_={0: "", 1: "Low Fluid Level"},plc_type='Micro800'),
|
||||
PLCChannel(plc_ip,'fluidlevelhi','AL0_FluidLevelHi','BOOL',0,3600,map_={0: "", 1: "High Fluid Level"},plc_type='Micro800'),
|
||||
PLCChannel(plc_ip,'fluidlevelhihi','AL0_FluidLevelHiHi','BOOL',0,3600,map_={0: "", 1: "High High Fluid Level"},plc_type='Micro800'),
|
||||
PLCChannel(plc_ip,'lockedout','AlarmLockOut','BOOL',0,3600,map_={0: "", 1: "Locked Out Repeated Alarms"},plc_type='Micro800'),
|
||||
PLCChannel(plc_ip,'volume_flow','Val_FlowmeterFR','REAL',5,3600,plc_type='Micro800'),
|
||||
PLCChannel(plc_ip,'current','val_VFD_OutputCurrent','REAL',5,3600,plc_type='Micro800'),
|
||||
PLCChannel(plc_ip,'frequency','val_VFD_ActualSpeed','REAL',5,3600,plc_type='Micro800'),
|
||||
PLCChannel(plc_ip,'pid_feedback','val_FluidLevel','REAL',5,3600,plc_type='Micro800'),
|
||||
PLCChannel(plc_ip,'totalizer_1','Val_FlowMeterT1','REAL',5,3600,plc_type='Micro800'),
|
||||
PLCChannel(plc_ip,'totalizer_2','Val_FlowMeterT2','REAL',5,3600,plc_type='Micro800'),
|
||||
PLCChannel(plc_ip,'totalizer_3','Val_FlowMeterT3','REAL',5,3600,plc_type='Micro800'),
|
||||
PLCChannel(plc_ip,'volume_flow_units','CMD_FlowMeterUnit','BOOL',1,3600,map_={0: "GPM", 1: "BPD"},plc_type='Micro800')
|
||||
]
|
||||
else:
|
||||
if drive_enabled:
|
||||
tags = [
|
||||
ModbusChannel('volume_flow', 3873, 'FLOAT', 10, 3600,channel_size=2, unit_number=flowmeter_unit_number),
|
||||
ModbusChannel('totalizer_1', 2609, 'FLOAT', 100, 3600,channel_size=2, unit_number=flowmeter_unit_number),
|
||||
ModbusChannel('totalizer_2', 2809, 'FLOAT', 100, 3600,channel_size=2, unit_number=flowmeter_unit_number),
|
||||
ModbusChannel('totalizer_3', 3009, 'FLOAT', 100, 3600,channel_size=2, unit_number=flowmeter_unit_number),
|
||||
ModbusChannel('volume_flow_units', 2102, 'INTEGER', 1,86400,channel_size=1, unit_number=flowmeter_unit_number, transform_fn=volume_units),
|
||||
ModbusChannel('totalizer_1_units', 4603, 'INTEGER', 1,86400,channel_size=1, unit_number=flowmeter_unit_number, transform_fn=totalizer_units),
|
||||
ModbusChannel('totalizer_2_units', 4604, 'INTEGER', 1,86400,channel_size=1, unit_number=flowmeter_unit_number, transform_fn=totalizer_units),
|
||||
ModbusChannel('totalizer_3_units', 4605, 'INTEGER', 1,86400,channel_size=1, unit_number=flowmeter_unit_number, transform_fn=totalizer_units),
|
||||
ModbusChannel('remote_start', 0000, 'INTEGER', 1, 86400, channel_size=1, unit_number=flowmeter_unit_number),
|
||||
ModbusChannel('run_status', 772, 'STRING', 0, 3600, channel_size=1, unit_number=drive_unit_number, transform_fn=status_codes),
|
||||
ModbusChannel('frequency', 784, 'INTEGER', 2, 3600, channel_size=2, unit_number=drive_unit_number,scaling=2 ),
|
||||
ModbusChannel('current', 783, 'INTEGER', 2, 3600, channel_size=2, unit_number=drive_unit_number,scaling=1 ),
|
||||
ModbusChannel('fault_a', 815, 'STRING', 1, 3600, channel_size=1, unit_number=drive_unit_number,transform_fn=fault_code_a),
|
||||
ModbusChannel('fault_b', 816, 'STRING', 1, 3600, channel_size=1, unit_number=drive_unit_number,transform_fn=fault_code_b),
|
||||
ModbusChannel('pid_ref', 791, 'INTEGER', 5, 3600, channel_size=1, unit_number=drive_unit_number,scaling=1),
|
||||
ModbusChannel('pid_feedback', 792, 'INTEGER', 5, 3600, channel_size=1, unit_number=drive_unit_number,scaling=1),
|
||||
ModbusChannel('motor_rated_current', 4896, 'INTEGER', 300, 86400, channel_size=1, unit_number=drive_unit_number,scaling=1),
|
||||
ModbusChannel('sleep_delay', 4924, 'INTEGER', 5, 86400, channel_size=1, unit_number=drive_unit_number, scaling=1)
|
||||
]
|
||||
else:
|
||||
tags = [
|
||||
ModbusChannel('volume_flow', 3873, 'FLOAT', 10, 3600,channel_size=2, unit_number=flowmeter_unit_number),
|
||||
ModbusChannel('totalizer_1', 2609, 'FLOAT', 100, 3600,channel_size=2, unit_number=flowmeter_unit_number),
|
||||
ModbusChannel('totalizer_2', 2809, 'FLOAT', 100, 3600,channel_size=2, unit_number=flowmeter_unit_number),
|
||||
ModbusChannel('totalizer_3', 3009, 'FLOAT', 100, 3600,channel_size=2, unit_number=flowmeter_unit_number),
|
||||
ModbusChannel('volume_flow_units', 2102, 'INTEGER', 1, 86400,channel_size=1, unit_number=flowmeter_unit_number, transform_fn=volume_units),
|
||||
ModbusChannel('totalizer_1_units', 4603, 'INTEGER', 1, 86400,channel_size=1, unit_number=flowmeter_unit_number, transform_fn=totalizer_units),
|
||||
ModbusChannel('totalizer_2_units', 4604, 'INTEGER', 1, 86400,channel_size=1, unit_number=flowmeter_unit_number, transform_fn=totalizer_units),
|
||||
ModbusChannel('totalizer_3_units', 4605, 'INTEGER', 1, 86400,channel_size=1, unit_number=flowmeter_unit_number, transform_fn=totalizer_units),
|
||||
ModbusChannel('remote_start', 0000, 'BOOL', 1, 86400, channel_size=1, unit_number=flowmeter_unit_number)
|
||||
]
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
BIN
meshifyDrivers/piflow/__pycache__/runtimeStats.cpython-39.pyc
Normal file
Binary file not shown.
17
meshifyDrivers/piflow/config.txt
Normal file
@@ -0,0 +1,17 @@
|
||||
{
|
||||
|
||||
"driverFileName":"PiFlow.py",
|
||||
"deviceName":"piflow",
|
||||
"driverId":"0280",
|
||||
"releaseVersion":"28",
|
||||
"files": {
|
||||
"file1":"PiFlow.py",
|
||||
"file2":"Channel.py",
|
||||
"file3":"file_logger.py",
|
||||
"file4":"Tags.py",
|
||||
"file5":"utilities.py",
|
||||
"file6":"persistence.py",
|
||||
"file7":"runtimeStats.py"
|
||||
}
|
||||
|
||||
}
|
||||
205
meshifyDrivers/piflow/device_base.py
Normal file
@@ -0,0 +1,205 @@
|
||||
import types
|
||||
import traceback
|
||||
import binascii
|
||||
import threading
|
||||
import time
|
||||
import thread
|
||||
import os
|
||||
import struct
|
||||
import sys
|
||||
import textwrap
|
||||
|
||||
class deviceBase():
|
||||
|
||||
def __init__(self, name=None, number=None, mac=None, Q=None, mcu=None, companyId=None, offset=None, mqtt=None, Nodes=None):
|
||||
self.offset = offset
|
||||
self.company = companyId
|
||||
self.name = name
|
||||
self.number = number
|
||||
self.q = Q
|
||||
self.deviceName = name + '_[' + mac + ':' + number[0:2] + ':' + number[2:] + ']!'
|
||||
self.chName = "M1" + '_[' + mac + ':'
|
||||
self.chName2 = '_[' + mac + ':'
|
||||
print 'device name is:'
|
||||
print self.deviceName
|
||||
mac2 = mac.replace(":", "")
|
||||
self.mac = mac2.upper()
|
||||
self.address = 1
|
||||
self.debug = True
|
||||
self.mcu = mcu
|
||||
self.firstRun = True
|
||||
self.mqtt = mqtt
|
||||
self.nodes = Nodes
|
||||
#local dictionary of derived nodes ex: localNodes[tank_0199] = self
|
||||
self.localNodes = {}
|
||||
os.system("chmod 777 /root/reboot")
|
||||
os.system("echo nameserver 8.8.8.8 > /etc/resolv.conf")
|
||||
|
||||
|
||||
def sendtodbLoc(self, ch, channel, value, timestamp, deviceName, mac):
|
||||
|
||||
|
||||
#this will add your derived nodes to the master nodes list, allowing them to receive sets!
|
||||
localNodesName = deviceName + "_" + str(ch) + "99"
|
||||
|
||||
if not self.localNodes.has_key(localNodesName):
|
||||
self.localNodes[localNodesName] = True
|
||||
self.nodes[localNodesName] = self
|
||||
|
||||
#make the techname
|
||||
lst = textwrap.wrap(str(mac), width=2)
|
||||
tech = ""
|
||||
for i in range(len(lst)):
|
||||
tech += lst[i].lower() + ":"
|
||||
|
||||
|
||||
chName2 = '_[' + tech
|
||||
|
||||
if int(ch) < 10:
|
||||
ch = "0" + str(int(ch))
|
||||
|
||||
if len(ch) > 2:
|
||||
ch = ch[:-2]
|
||||
|
||||
dname = deviceName + chName2 + str(ch) + ":98]!"
|
||||
|
||||
if int(timestamp) == 0:
|
||||
timestamp = self.getTime()
|
||||
|
||||
topic = 'meshify/db/%s/%s/%s/%s' % (self.company, mac, dname, channel)
|
||||
print topic
|
||||
msg = """[ { "value":"%s", "timestamp":"%s" } ]""" % (str(value), str(timestamp))
|
||||
print msg
|
||||
self.q.put([topic, msg, 0])
|
||||
|
||||
def sendtodbDevJSON(self, ch, channel, value, timestamp, deviceName):
|
||||
|
||||
if int(ch) < 10:
|
||||
ch = "0" + str(int(ch))
|
||||
dname = deviceName + self.chName2 + str(ch) + ":99]!"
|
||||
if int(timestamp) == 0:
|
||||
timestamp = self.getTime()
|
||||
|
||||
topic = 'meshify/db/%s/%s/%s/%s' % (self.company, self.mac, dname, channel)
|
||||
print topic
|
||||
msg = """[ { "value":%s, "timestamp":"%s" } ]""" % (str(value), str(timestamp))
|
||||
print msg
|
||||
self.q.put([topic, msg, 0])
|
||||
|
||||
def sendtodbLora(self, ch, channel, value, timestamp, deviceName):
|
||||
|
||||
if ":" not in ch:
|
||||
ch = ch[0:2] + ":" + ch[2:4]
|
||||
|
||||
#this will add your derived nodes to the master nodes list, allowing them to receive sets!
|
||||
localNodesName = deviceName + "_" + str(ch).replace(':', "")
|
||||
|
||||
if not self.localNodes.has_key(localNodesName):
|
||||
self.localNodes[localNodesName] = True
|
||||
self.nodes[localNodesName] = self
|
||||
|
||||
|
||||
|
||||
dname = deviceName + self.chName2 + str(ch) + "]!"
|
||||
|
||||
|
||||
|
||||
if int(timestamp) == 0:
|
||||
timestamp = self.getTime()
|
||||
|
||||
topic = 'meshify/db/%s/%s/%s/%s' % (self.company, self.mac, dname, channel)
|
||||
print topic
|
||||
msg = """[ { "value":"%s", "timestamp":"%s" } ]""" % (str(value), str(timestamp))
|
||||
print msg
|
||||
self.q.put([topic, msg, 0])
|
||||
|
||||
def sendtodbDev(self, ch, channel, value, timestamp, deviceName):
|
||||
|
||||
|
||||
#this will add your derived nodes to the master nodes list, allowing them to receive sets!
|
||||
localNodesName = deviceName + "_" + str(ch) + "99"
|
||||
|
||||
if not self.localNodes.has_key(localNodesName):
|
||||
self.localNodes[localNodesName] = True
|
||||
self.nodes[localNodesName] = self
|
||||
|
||||
if int(ch) < 10:
|
||||
ch = "0" + str(int(ch))
|
||||
|
||||
dname = deviceName + self.chName2 + str(ch) + ":99]!"
|
||||
|
||||
|
||||
|
||||
if int(timestamp) == 0:
|
||||
timestamp = self.getTime()
|
||||
|
||||
topic = 'meshify/db/%s/%s/%s/%s' % (self.company, self.mac, dname, channel)
|
||||
print topic
|
||||
msg = """[ { "value":"%s", "timestamp":"%s" } ]""" % (str(value), str(timestamp))
|
||||
print msg
|
||||
self.q.put([topic, msg, 0])
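# Illustrative example (company "acme" and mac "aa:bb:cc" are assumed):
# sendtodbDev(1, "frequency", 59.9, 0, "PiFlow") queues roughly
#
#   topic: meshify/db/acme/AABBCC/PiFlow_[aa:bb:cc:01:99]!/frequency
#   msg:   [ { "value":"59.9", "timestamp":"<epoch seconds>" } ]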
|
||||
|
||||
def sendToTB(self, payload):
|
||||
topic = 'v1/devices/me/telemetry'
|
||||
print(topic, payload)
|
||||
self.q.put([topic, payload, 0])
|
||||
|
||||
def sendToTBAttributes(self, payload):
|
||||
topic = 'v1/devices/me/attributes'
|
||||
print(topic, payload)
|
||||
self.q.put([topic, payload, 0])
|
||||
|
||||
def sendtodbCH(self, ch, channel, value, timestamp):
|
||||
|
||||
|
||||
if int(ch) < 10:
|
||||
ch = "0" + str(ch)
|
||||
|
||||
dname = self.chName + str(ch) + ":99]!"
|
||||
|
||||
|
||||
|
||||
if int(timestamp) == 0:
|
||||
timestamp = self.getTime()
|
||||
|
||||
topic = 'meshify/db/%s/%s/%s/%s' % (self.company, self.mac, dname, channel)
|
||||
print topic
|
||||
msg = """[ { "value":"%s", "timestamp":"%s" } ]""" % (str(value), str(timestamp))
|
||||
print msg
|
||||
self.q.put([topic, msg, 0])
|
||||
|
||||
def sendtodb(self, channel, value, timestamp):
|
||||
|
||||
if int(timestamp) == 0:
|
||||
timestamp = self.getTime()
|
||||
if timestamp < 1400499858:
|
||||
return
|
||||
else:
|
||||
timestamp = str(int(timestamp) + int(self.offset))
|
||||
|
||||
topic = 'meshify/db/%s/%s/%s/%s' % (self.company, self.mac, self.deviceName, channel)
|
||||
print topic
|
||||
msg = """[ { "value":"%s", "timestamp":"%s" } ]""" % (str(value), str(timestamp))
|
||||
print msg
|
||||
self.q.put([topic, msg, 0])
|
||||
|
||||
def sendtodbJSON(self, channel, value, timestamp):
|
||||
|
||||
if int(timestamp) == 0:
|
||||
timestamp = self.getTime()
|
||||
if timestamp < 1400499858:
|
||||
return
|
||||
else:
|
||||
timestamp = str(int(timestamp) + int(self.offset))
|
||||
|
||||
topic = 'meshify/db/%s/%s/%s/%s' % (self.company, self.mac, self.deviceName, channel)
|
||||
print topic
|
||||
msg = """[ { "value":%s, "timestamp":"%s" } ]""" % (str(value), str(timestamp))
|
||||
print msg
|
||||
self.q.put([topic, msg, 0])
|
||||
def getTime(self):
|
||||
return str(int(time.time() + int(self.offset)))
|
||||
|
||||
|
||||
|
||||
|
||||
18
meshifyDrivers/piflow/file_logger.py
Normal file
@@ -0,0 +1,18 @@
|
||||
"""Logging setup for PiFlow"""
|
||||
import logging
|
||||
from logging.handlers import RotatingFileHandler
|
||||
import sys
|
||||
|
||||
log_formatter = logging.Formatter('%(asctime)s %(levelname)s %(funcName)s(%(lineno)d) %(message)s')
|
||||
log_file = './PiFlow.log'
|
||||
my_handler = RotatingFileHandler(log_file, mode='a', maxBytes=500*1024,
|
||||
backupCount=2, encoding=None, delay=0)
|
||||
my_handler.setFormatter(log_formatter)
|
||||
my_handler.setLevel(logging.INFO)
|
||||
filelogger = logging.getLogger('PiFlow')
|
||||
filelogger.setLevel(logging.INFO)
|
||||
filelogger.addHandler(my_handler)
|
||||
|
||||
console_out = logging.StreamHandler(sys.stdout)
|
||||
console_out.setFormatter(log_formatter)
|
||||
filelogger.addHandler(console_out)
|
||||
20
meshifyDrivers/piflow/modbusTester.py
Normal file
@@ -0,0 +1,20 @@
|
||||
import minimalmodbus
|
||||
|
||||
minimalmodbus.BAUDRATE = 9600
|
||||
minimalmodbus.STOPBITS = 1
|
||||
address = 123
|
||||
|
||||
instrument = minimalmodbus.Instrument('/dev/ttyS0', address) #device, modbus slave address
|
||||
instrument.debug = True
|
||||
for _ in range(3):
|
||||
try:
|
||||
value = instrument.read_float(3873) #register -1 for float
|
||||
print("Flow Rate from Flow Meter: {}".format(value))
|
||||
except Exception as e:
|
||||
print("Error: {}".format(e))
|
||||
|
||||
try:
|
||||
value = instrument.read_float(784) #register -1 for float
|
||||
print("Frequency from Drive: {}".format(value))
|
||||
except Exception as e:
|
||||
print("Error: {}".format(e))
|
||||
21
meshifyDrivers/piflow/persistence.py
Normal file
@@ -0,0 +1,21 @@
|
||||
"""Data persistance functions."""
|
||||
# if more advanced persistence is needed, use a sqlite database
|
||||
import json
|
||||
|
||||
|
||||
def load(filename="persist.json"):
|
||||
"""Load persisted settings from the specified file."""
|
||||
try:
|
||||
with open(filename, 'r') as persist_file:
|
||||
return json.load(persist_file)
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
|
||||
def store(persist_obj, filename="persist.json"):
|
||||
"""Store the persisting settings into the specified file."""
|
||||
try:
|
||||
with open(filename, 'w') as persist_file:
|
||||
return json.dump(persist_obj, persist_file, indent=4)
|
||||
except Exception:
|
||||
return False
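# Minimal usage sketch: load() returns False when the file is missing, which
# is why callers seed a default dict and store() it on first boot.
#
#   settings = load('persist.json') or {'flowmeter': 247}
#   settings['flowmeter'] = 12
#   store(settings, 'persist.json')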
|
||||
172
meshifyDrivers/piflow/runtimeStats.py
Normal file
@@ -0,0 +1,172 @@
|
||||
from datetime import datetime as dt
|
||||
import time
|
||||
import json
|
||||
import math
|
||||
|
||||
class RuntimeStats:
|
||||
|
||||
def __init__(self):
|
||||
self.runs = {}
|
||||
self.currentRun = 0
|
||||
self.today = ""
|
||||
self.todayString = ""
|
||||
|
||||
def manageTime(self):
|
||||
if self.todayString != dt.strftime(dt.today(), "%Y-%m-%d"):
|
||||
if self.runs[self.todayString]["run_" + str(self.currentRun)]["start"] and not self.runs[self.todayString]["run_" + str(self.currentRun)]["end"]:
|
||||
self.runs[self.todayString]["run_" + str(self.currentRun)]["end"] = time.mktime(dt.strptime(self.todayString + " 23:59:59", "%Y-%m-%d %H:%M:%S").timetuple())
|
||||
self.addDay()
|
||||
self.today = dt.today()
|
||||
self.todayString = dt.strftime(self.today, "%Y-%m-%d")
|
||||
days = list(self.runs.keys())
|
||||
days.sort()
|
||||
while (dt.strptime(days[-1],"%Y-%m-%d") - dt.strptime(days[0], "%Y-%m-%d")).days > 30:
|
||||
self.removeDay(day=days[0])
|
||||
days = list(self.runs.keys())
|
||||
days.sort()
|
||||
|
||||
def addHertzDataPoint(self, frequency):
|
||||
if frequency > 0:
|
||||
self.manageTime()
|
||||
try:
|
||||
self.runs[self.todayString]["run_" + str(self.currentRun)]["frequencies"].append(frequency)
|
||||
except:
|
||||
self.runs[self.todayString]["run_" + str(self.currentRun)]["frequencies"] = [frequency]
|
||||
|
||||
def startRun(self):
|
||||
if self.checkRunning():
|
||||
self.endRun()
|
||||
self.runs[self.todayString]["run_" + str(self.currentRun)]["start"] = time.time()
|
||||
|
||||
def endRun(self):
|
||||
self.runs[self.todayString]["run_" + str(self.currentRun)]["end"] = time.time()
|
||||
self.currentRun += 1
|
||||
self.runs[self.todayString]["run_" + str(self.currentRun)] = {"start":0, "end": 0, "frequencies":[]}
|
||||
|
||||
def checkRunning(self):
|
||||
if self.runs[self.todayString]["run_" + str(self.currentRun)]["start"] and not self.runs[self.todayString]["run_" + str(self.currentRun)]["end"]:
|
||||
return True
|
||||
return False
|
||||
|
||||
def addDay(self):
|
||||
self.today = dt.today()
|
||||
self.todayString = dt.strftime(self.today, "%Y-%m-%d")
|
||||
self.currentRun = 1
|
||||
self.runs[self.todayString] = {}
|
||||
self.runs[self.todayString]["run_" + str(self.currentRun)] = {"start":0, "end": 0, "frequencies":[]}
|
||||
|
||||
def countRunsDay(self, day=None):
|
||||
if not day:
|
||||
day = self.todayString
|
||||
return len(self.runs[day].keys())
|
||||
|
||||
def countRunsMultiDay(self, numDays=30):
|
||||
total_runs = 0
|
||||
for day in list(self.runs.keys()):
|
||||
total_runs += self.countRunsDay(day=day)
|
||||
return total_runs
|
||||
|
||||
def calculateAverageHertzDay(self, day=None, returnArray=False):
|
||||
dayFrequencies = []
|
||||
if not day:
|
||||
day = self.todayString
|
||||
for run in list(self.runs[day].keys()):
|
||||
try:
|
||||
dayFrequencies += self.runs[day][run]["frequencies"]
|
||||
except Exception as e:
|
||||
print("{} missing frequency data for {}".format(day,run))
|
||||
if returnArray:
|
||||
return dayFrequencies
|
||||
return round(math.fsum(dayFrequencies)/len(dayFrequencies),2)
|
||||
|
||||
    def calculateAverageHertzMultiDay(self, numDays=30):
        self.manageTime()
        frequencies = []
        for day in list(self.runs.keys()):
            if not day == self.todayString and (dt.strptime(self.todayString, "%Y-%m-%d") - dt.strptime(day, "%Y-%m-%d")).days <= numDays:
                try:
                    frequencies += self.calculateAverageHertzDay(day=day, returnArray=True)
                except Exception:
                    print("{} missing frequency data".format(day))
        if len(frequencies):
            return round(math.fsum(frequencies) / len(frequencies), 2)
        return 0

    def calculateRunTimeDay(self, day=None, convertToHours=True):
        total_time = 0
        if not day:
            day = self.todayString
        for run in list(self.runs[day].keys()):
            total_time = self.runs[day][run]["end"] - self.runs[day][run]["start"] + total_time
        if convertToHours:
            return self.convertSecondstoHours(total_time)
        return total_time

    def calculateRunTimeMultiDay(self, numDays=30, convertToHours=True):
        total_time = 0
        for day in list(self.runs.keys()):
            if not day == self.todayString and (dt.strptime(self.todayString, "%Y-%m-%d") - dt.strptime(day, "%Y-%m-%d")).days <= numDays:
                total_time += self.calculateRunTimeDay(day=day, convertToHours=False)
        if convertToHours:
            return self.convertSecondstoHours(total_time)
        return total_time

    def calculateRunPercentDay(self, day=None, precise=False):
        if not day:
            day = self.todayString
        if precise:
            return (self.calculateRunTimeDay(day=day) / 24) * 100
        return round((self.calculateRunTimeDay(day=day) / 24) * 100, 2)

    def calculateRunPercentMultiDay(self, numDays=30, precise=False):
        self.manageTime()
        if precise:
            return (self.calculateRunTimeMultiDay() / (24 * numDays)) * 100
        return round((self.calculateRunTimeMultiDay() / (24 * numDays)) * 100, 2)

    def removeDay(self, day=None):
        if not day:
            raise ValueError("day cannot be None")
        print("removing day {}".format(day))
        del self.runs[day]

    def convertSecondstoHours(self, seconds):
        return round(seconds / (60 * 60), 2)

    def loadDataFromFile(self, filePath="./runtimestats.json"):
        try:
            with open(filePath, "r") as f:
                temp = json.load(f)
                self.runs = temp["data"]
                self.currentRun = temp["current_run"]
                self.today = dt.strptime(temp["current_day"], "%Y-%m-%d")
                self.todayString = temp["current_day"]
                self.manageTime()
        except FileNotFoundError:
            print("Could not find file at {}".format(filePath))
            print("creating file")
            self.addDay()
            try:
                with open(filePath, "w") as f:
                    d = {
                        "current_run": self.currentRun,
                        "current_day": self.todayString,
                        "data": self.runs
                    }
                    json.dump(d, f, indent=4)
            except Exception as e:
                print(e)

    def saveDataToFile(self, filePath="./runtimestats.json"):
        try:
            print("Saving Runs")
            with open(filePath, "w") as f:
                d = {
                    "current_run": self.currentRun,
                    "current_day": self.todayString,
                    "data": self.runs
                }
                json.dump(d, f, indent=4)
        except Exception as e:
            print(e)
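
A minimal usage sketch for the RuntimeStats class above (the module name and file path come from testRTS.py further down in this commit; the exact call sequence is illustrative):

# Illustrative driver loop for RuntimeStats; the path is an example.
from runtimeStats import RuntimeStats

rts = RuntimeStats()
rts.loadDataFromFile(filePath="./runtimestats.json")  # creates the file if missing
rts.startRun()                 # opens run_N for today (closes a still-open run first)
rts.addHertzDataPoint(60)      # frequency samples accumulate on the current run
rts.endRun()                   # stamps the end time and opens run_N+1
print(rts.calculateRunTimeDay())       # hours run today
print(rts.calculateAverageHertzDay())  # mean Hz across today's runs
rts.saveDataToFile(filePath="./runtimestats.json")
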
638
meshifyDrivers/piflow/runtimestats.ipynb
Normal file
@@ -0,0 +1,638 @@
{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [],
   "source": [
    "from datetime import datetime as dt\n",
    "from datetime import timedelta as td\n",
    "from time import sleep\n",
    "import json\n",
    "import math"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 106,
   "metadata": {},
   "outputs": [],
   "source": [
    "class RuntimeStats:\n",
    "    \n",
    "    def __init__(self):\n",
    "        self.runs = {}\n",
    "        self.currentRun = 0\n",
    "        self.today = \"\"\n",
    "        self.todayString = \"\"\n",
    "\n",
    "    def manageTime(self):\n",
    "        if self.today != dt.today():\n",
    "            if self.runs[self.todayString][\"run_\" + str(self.currentRun)][\"start\"] and not self.runs[self.todayString][\"run_\" + str(self.currentRun)][\"end\"]:\n",
    "                self.runs[self.todayString][\"run_\" + str(self.currentRun)][\"end\"] = dt.timestamp(dt.strptime(self.todayString + \" 23:59:59\", \"%Y-%m-%d %H:%M:%S\"))\n",
    "            self.addDay()\n",
    "            days = list(self.runs.keys())\n",
    "            days.sort()\n",
    "            while (dt.strptime(days[-1],\"%Y-%m-%d\") - dt.strptime(days[0], \"%Y-%m-%d\")).days > 30:\n",
    "                self.removeDay(day=days[0])\n",
    "                days = list(self.runs.keys())\n",
    "                days.sort()\n",
    "\n",
    "    def addHertzDataPoint(self, frequency):\n",
    "        if frequency > 0:\n",
    "            self.manageTime()\n",
    "            try:\n",
    "                self.runs[self.todayString][\"run_\" + str(self.currentRun)][\"frequencies\"].append(frequency)\n",
    "            except:\n",
    "                self.runs[self.todayString][\"run_\" + str(self.currentRun)][\"frequencies\"] = [frequency]\n",
    "\n",
    "    def startRun(self):\n",
    "        self.runs[self.todayString][\"run_\" + str(self.currentRun)][\"start\"] = dt.timestamp(dt.now())\n",
    "\n",
    "    def endRun(self):\n",
    "        self.runs[self.todayString][\"run_\" + str(self.currentRun)][\"end\"] = dt.timestamp(dt.now()) \n",
    "\n",
    "    def addDay(self):\n",
    "        self.today = dt.today()\n",
    "        self.todayString = dt.strftime(self.today, \"%Y-%m-%d\")\n",
    "        self.currentRun = 1\n",
    "        self.runs[self.todayString] = {}\n",
    "        self.runs[self.todayString][\"run_\" + str(self.currentRun)] = {\"start\":0, \"end\": 0, \"frequencies\":[]}\n",
    "\n",
    "    def countRunsDay(self, day=None):\n",
    "        if not day:\n",
    "            day = self.todayString\n",
    "        return len(self.runs[day].keys())\n",
    "\n",
    "    def countRunsMultiDay(self, numDays=30):\n",
    "        total_runs = 0\n",
    "        for day in list(self.runs.keys()):\n",
    "            total_runs += self.countRunsDay(day=day)\n",
    "        return total_runs\n",
    "\n",
    "    def calculateAverageHertzDay(self, day=None, returnArray=False):\n",
    "        dayFrequencies = []\n",
    "        if not day:\n",
    "            day = self.todayString\n",
    "        for run in list(self.runs[day].keys()):\n",
    "            try:\n",
    "                dayFrequencies += self.runs[day][run][\"frequencies\"]\n",
    "            except Exception as e:\n",
    "                print(\"{} missing frequency data for {}\".format(day,run))\n",
    "        if returnArray:\n",
    "            return dayFrequencies\n",
    "        return round(math.fsum(dayFrequencies)/len(dayFrequencies),2)\n",
    "\n",
    "    def calculateAverageHertzMultiDay(self, numDays=30):\n",
    "        frequencies = []\n",
    "        for day in list(self.runs.keys()):\n",
    "            try:\n",
    "                frequencies += self.calculateAverageHertzDay(day=day, returnArray=True)\n",
    "            except Exception as e:\n",
    "                print(\"{} missing frequency data\".format(day))\n",
    "        return round(math.fsum(frequencies)/len(frequencies), 2)\n",
    "    \n",
    "    def calculateRunTimeDay(self, day=None, convertToHours=True):\n",
    "        total_time = 0\n",
    "        if not day:\n",
    "            day = self.todayString\n",
    "        for run in list(self.runs[day].keys()):\n",
    "            total_time = self.runs[day][run][\"end\"] - self.runs[day][run][\"start\"] + total_time\n",
    "        if convertToHours:\n",
    "            return RuntimeStats.convertSecondstoHours(total_time)\n",
    "        return total_time\n",
    "\n",
    "    def calculateRunTimeMultiDay(self, numDays=30, convertToHours=True):\n",
    "        total_time = 0\n",
    "        for day in list(self.runs.keys()):\n",
    "            total_time += self.calculateRunTimeDay(day=day, convertToHours=False)\n",
    "        if convertToHours:\n",
    "            return RuntimeStats.convertSecondstoHours(total_time)\n",
    "        return total_time\n",
    "    \n",
    "    def calculateRunPercentDay(self, day=None, precise=False):\n",
    "        if not day:\n",
    "            day = self.todayString\n",
    "        if precise:\n",
    "            return (self.calculateRunTimeDay(day=day)/24) * 100\n",
    "        return round((self.calculateRunTimeDay(day=day)/24) * 100, 2)\n",
    "    \n",
    "\n",
    "    def calculateRunPercentMultiDay(self, numDays=30, precise=False):\n",
    "        if precise:\n",
    "            return (self.calculateRunTimeMultiDay()/(24*numDays)) * 100\n",
    "        return round((self.calculateRunTimeMultiDay()/(24*numDays)) * 100,2)\n",
    "\n",
    "    def removeDay(self, day=None):\n",
    "        if not day:\n",
    "            raise Exception(\"Day can not be None\")\n",
    "        print(\"removing day {}\".format(day))\n",
    "        del self.runs[day]\n",
    "    \n",
    "    def convertSecondstoHours(seconds):\n",
    "        return round(seconds / (60*60),2)\n",
    "\n",
    "    def loadDataFromFile(self, filePath=\"../runtimestats.json\"):\n",
    "        try:\n",
    "            with open(filePath, \"r\") as f:\n",
    "                temp = json.load(f)\n",
    "                self.runs = temp[\"data\"]\n",
    "                self.currentRun = temp[\"current_run\"]\n",
    "                self.today = dt.strptime(temp[\"current_day\"], \"%Y-%m-%d\")\n",
    "                self.todayString = temp[\"current_day\"]\n",
    "                self.manageTime()\n",
    "        except FileExistsError:\n",
    "            print(\"Could not find file at {}\".format(filePath))\n",
    "        except FileNotFoundError:\n",
    "            print(\"Could not find file at {}\".format(filePath))\n",
    "            print(\"creating file\")\n",
    "            try:\n",
    "                with open(filePath, \"w\") as f:\n",
    "                    d = {\n",
    "                        \"current_run\": self.currentRun,\n",
    "                        \"current_day\": self.todayString,\n",
    "                        \"data\": self.runs\n",
    "                    }\n",
    "                    json.dump(d, f, indent=4)\n",
    "            except Exception as e:\n",
    "                print(e)\n",
    "        except Exception as e:\n",
    "            print(e)\n",
    "\n",
    "    def saveDataToFile(self, filePath=\"../runtimestats.json\"):\n",
    "        try:\n",
    "            print(\"Saving Runs\")\n",
    "            with open(filePath, \"w+\") as f:\n",
    "                d = {\n",
    "                    \"current_run\": self.currentRun,\n",
    "                    \"current_day\": self.todayString,\n",
    "                    \"data\": self.runs\n",
    "                }\n",
    "                json.dump(d, f, indent=4)\n",
    "        except FileNotFoundError:\n",
    "            print(\"Could not find file at {}\".format(filePath))\n",
    "        except Exception as e:\n",
    "            print(e)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 107,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "{}\n",
      "{'2023-01-11': {'run_1': {'start': 1673465959.694776, 'frequencies': [67, 65, 59, 62, 100], 'end': 1673475545.313309}, 'run_2': {'start': 1673469145.271416, 'end': 1673469136.691883, 'frequencies': [100, 99, 98, 87, 56, 56, 58, 67]}}, '2023-01-10': {'run_1': {'start': 1673465959.694776, 'frequencies': [67, 65, 59, 62], 'end': 1673469136.691883}, 'run_2': {'start': 1673469145.271416, 'end': 1673469136.691883}}, '2023-01-09': {'run_1': {'start': 1673465959.694776, 'frequencies': [67, 65, 59, 62], 'end': 1673469136.691883}, 'run_2': {'start': 1673469145.271416, 'end': 1673469136.691883}}, '2022-12-17': {'run_1': {'start': 0, 'end': 0, 'frequencies': []}}}\n"
     ]
    }
   ],
   "source": [
    "rts = RuntimeStats()\n",
    "print(rts.runs)\n",
    "path = \"/Users/nico/Documents/test/runtimestats.json\"\n",
    "rts.loadDataFromFile(filePath=path)\n",
    "print(rts.runs)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 108,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "removing day 2022-12-17\n"
     ]
    }
   ],
   "source": [
    "rts.manageTime()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 109,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "{'2023-01-11': {'run_1': {'start': 1673465959.694776,\n",
       "   'frequencies': [67, 65, 59, 62, 100],\n",
       "   'end': 1673475545.313309},\n",
       "  'run_2': {'start': 1673469145.271416,\n",
       "   'end': 1673469136.691883,\n",
       "   'frequencies': [100, 99, 98, 87, 56, 56, 58, 67]}},\n",
       " '2023-01-10': {'run_1': {'start': 1673465959.694776,\n",
       "   'frequencies': [67, 65, 59, 62],\n",
       "   'end': 1673469136.691883},\n",
       "  'run_2': {'start': 1673469145.271416, 'end': 1673469136.691883}},\n",
       " '2023-01-09': {'run_1': {'start': 1673465959.694776,\n",
       "   'frequencies': [67, 65, 59, 62],\n",
       "   'end': 1673469136.691883},\n",
       "  'run_2': {'start': 1673469145.271416, 'end': 1673469136.691883}},\n",
       " '2023-01-17': {'run_1': {'start': 0, 'end': 0, 'frequencies': []}}}"
      ]
     },
     "execution_count": 109,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "rts.runs"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "rts.endRun()\n",
    "print(rts.runs)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "rts.saveDataToFile(filePath=path)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "rts.startRun()\n",
    "print(rts.runs)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "rts.countRunsDay()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 30,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "2.66"
      ]
     },
     "execution_count": 30,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "rts.calculateRunTimeDay(day=\"2023-1-11\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 31,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "11.08"
      ]
     },
     "execution_count": 31,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "rts.calculateRunPercentDay(day=\"2023-1-11\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 32,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "0.61"
      ]
     },
     "execution_count": 32,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "rts.calculateRunPercentMultiDay()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 33,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "4.42"
      ]
     },
     "execution_count": 33,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "rts.calculateRunTimeMultiDay()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 34,
   "metadata": {},
   "outputs": [
    {
     "ename": "KeyError",
     "evalue": "'2023-1-17'",
     "output_type": "error",
     "traceback": [
      "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
      "\u001b[0;31mKeyError\u001b[0m Traceback (most recent call last)",
      "\u001b[1;32m/Users/nico/Documents/GitHub/HenryPump-Drivers/piflow/runtimestats.ipynb Cell 12\u001b[0m in \u001b[0;36mRuntimeStats.addHertzDataPoint\u001b[0;34m(self, frequency)\u001b[0m\n\u001b[1;32m <a href='vscode-notebook-cell:/Users/nico/Documents/GitHub/HenryPump-Drivers/piflow/runtimestats.ipynb#X13sZmlsZQ%3D%3D?line=9'>10</a>\u001b[0m \u001b[39mtry\u001b[39;00m:\n\u001b[0;32m---> <a href='vscode-notebook-cell:/Users/nico/Documents/GitHub/HenryPump-Drivers/piflow/runtimestats.ipynb#X13sZmlsZQ%3D%3D?line=10'>11</a>\u001b[0m \u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49mruns[\u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49mtodayString][\u001b[39m\"\u001b[39m\u001b[39mrun_\u001b[39m\u001b[39m\"\u001b[39m \u001b[39m+\u001b[39m \u001b[39mstr\u001b[39m(\u001b[39mself\u001b[39m\u001b[39m.\u001b[39mcurrentRun)][\u001b[39m\"\u001b[39m\u001b[39mfrequencies\u001b[39m\u001b[39m\"\u001b[39m]\u001b[39m.\u001b[39mappend(frequency)\n\u001b[1;32m <a href='vscode-notebook-cell:/Users/nico/Documents/GitHub/HenryPump-Drivers/piflow/runtimestats.ipynb#X13sZmlsZQ%3D%3D?line=11'>12</a>\u001b[0m \u001b[39mexcept\u001b[39;00m:\n",
      "\u001b[0;31mKeyError\u001b[0m: '2023-1-17'",
      "\nDuring handling of the above exception, another exception occurred:\n",
      "\u001b[0;31mKeyError\u001b[0m Traceback (most recent call last)",
      "\u001b[1;32m/Users/nico/Documents/GitHub/HenryPump-Drivers/piflow/runtimestats.ipynb Cell 12\u001b[0m in \u001b[0;36m<cell line: 1>\u001b[0;34m()\u001b[0m\n\u001b[0;32m----> <a href='vscode-notebook-cell:/Users/nico/Documents/GitHub/HenryPump-Drivers/piflow/runtimestats.ipynb#X13sZmlsZQ%3D%3D?line=0'>1</a>\u001b[0m rts\u001b[39m.\u001b[39;49maddHertzDataPoint(\u001b[39m67\u001b[39;49m)\n",
      "\u001b[1;32m/Users/nico/Documents/GitHub/HenryPump-Drivers/piflow/runtimestats.ipynb Cell 12\u001b[0m in \u001b[0;36mRuntimeStats.addHertzDataPoint\u001b[0;34m(self, frequency)\u001b[0m\n\u001b[1;32m <a href='vscode-notebook-cell:/Users/nico/Documents/GitHub/HenryPump-Drivers/piflow/runtimestats.ipynb#X13sZmlsZQ%3D%3D?line=10'>11</a>\u001b[0m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39mruns[\u001b[39mself\u001b[39m\u001b[39m.\u001b[39mtodayString][\u001b[39m\"\u001b[39m\u001b[39mrun_\u001b[39m\u001b[39m\"\u001b[39m \u001b[39m+\u001b[39m \u001b[39mstr\u001b[39m(\u001b[39mself\u001b[39m\u001b[39m.\u001b[39mcurrentRun)][\u001b[39m\"\u001b[39m\u001b[39mfrequencies\u001b[39m\u001b[39m\"\u001b[39m]\u001b[39m.\u001b[39mappend(frequency)\n\u001b[1;32m <a href='vscode-notebook-cell:/Users/nico/Documents/GitHub/HenryPump-Drivers/piflow/runtimestats.ipynb#X13sZmlsZQ%3D%3D?line=11'>12</a>\u001b[0m \u001b[39mexcept\u001b[39;00m:\n\u001b[0;32m---> <a href='vscode-notebook-cell:/Users/nico/Documents/GitHub/HenryPump-Drivers/piflow/runtimestats.ipynb#X13sZmlsZQ%3D%3D?line=12'>13</a>\u001b[0m \u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49mruns[\u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49mtodayString][\u001b[39m\"\u001b[39m\u001b[39mrun_\u001b[39m\u001b[39m\"\u001b[39m \u001b[39m+\u001b[39m \u001b[39mstr\u001b[39m(\u001b[39mself\u001b[39m\u001b[39m.\u001b[39mcurrentRun)][\u001b[39m\"\u001b[39m\u001b[39mfrequencies\u001b[39m\u001b[39m\"\u001b[39m] \u001b[39m=\u001b[39m [frequency]\n",
      "\u001b[0;31mKeyError\u001b[0m: '2023-1-17'"
     ]
    }
   ],
   "source": [
    "rts.addHertzDataPoint(67)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 18,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "74.92\n",
      "2023-1-10 missing frequency data for run_2\n",
      "2023-1-9 missing frequency data for run_2\n",
      "70.48\n"
     ]
    }
   ],
   "source": [
    "print(rts.calculateAverageHertzDay(\"2023-1-11\"))\n",
    "print(rts.calculateAverageHertzMultiDay())"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "runs = {\"run_1\" : {}}\n",
    "runs[\"run_1\"][\"start\"] = dt.timestamp(dt.now())\n",
    "runs[\"run_1\"][\"end\"] = dt.timestamp(dt.now() + td(0,3600))\n",
    "\n",
    "runs[\"run_2\"] = {}\n",
    "runs[\"run_2\"][\"start\"] = dt.timestamp(dt.now() + td(0,3600) +td(0,3600))\n",
    "\n",
    "runs[\"run_2\"][\"end\"] = dt.timestamp(dt.now() + td(0,3600) +td(0,3600) + td(0,3600))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "total_time = 0\n",
    "for key in list(runs.keys()):\n",
    "    total_time = runs[key][\"end\"] - runs[key][\"start\"] + total_time"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "print(total_time)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "today = dt.today()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 39,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "1673991101.567802\n",
      "1674021599.0\n"
     ]
    }
   ],
   "source": [
    "print(dt.timestamp(dt.now()))\n",
    "print(dt.timestamp(dt.strptime(rts.todayString + \" 23:59:59\", \"%Y-%m-%d %H:%M:%S\")))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 47,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "'2023-01-17'"
      ]
     },
     "execution_count": 47,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "dt.strftime(dt.now(), \"%Y-%m-%d\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "{str(today.year)+\"-\"+str(today.month)+\"-\"+str(today.day): {\"run_1\": {\"start\": dt.timestamp(dt.now()), \"end\": dt.timestamp(dt.now()), \"hz\": [56,60,57,61,59,57,60]}}}"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "a = [1,2,4,5]\n",
    "b = [6,7,8,8,89]\n",
    "c = []\n",
    "c += a\n",
    "c += b\n",
    "print(math.fsum(c)/len(c))\n",
    "print((math.fsum(a)/len(a) + math.fsum(b)/len(b))/2)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 35,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "works\n"
     ]
    }
   ],
   "source": [
    "t = {\"this\": \"test1\", \"that\": {\"is\": \"a bigger test\"}}\n",
    "del t[\"this\"]\n",
    "try:\n",
    "    t[\"those\"]\n",
    "except:\n",
    "    print(\"works\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 59,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Saving Runs\n"
     ]
    }
   ],
   "source": [
    "rts.addDay()\n",
    "rts.saveDataToFile(filePath=path)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 78,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "2023-01-17\n",
      "2022-12-17\n",
      "31\n",
      "31\n"
     ]
    }
   ],
   "source": [
    "days = list(rts.runs.keys())\n",
    "days.sort()\n",
    "print(days[-1])\n",
    "print(days[0])\n",
    "print((dt.strptime(days[-1],\"%Y-%m-%d\") - dt.strptime(days[0], \"%Y-%m-%d\")).days)\n",
    "if (dt.strptime(days[-1],\"%Y-%m-%d\") - dt.strptime(days[0], \"%Y-%m-%d\")).days > 30:\n",
    "    print((dt.strptime(days[-1],\"%Y-%m-%d\") - dt.strptime(days[0], \"%Y-%m-%d\")).days)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 110,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "True\n"
     ]
    }
   ],
   "source": [
    "s = \"Operating in Forward;\"\n",
    "if \"Operating\" in s:\n",
    "    print(True)"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "webkit",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.9.12 | packaged by conda-forge | (main, Mar 24 2022, 23:25:14) \n[Clang 12.0.1 ]"
  },
  "orig_nbformat": 4,
  "vscode": {
   "interpreter": {
    "hash": "22238595996e71d7b27448e64f75d285aa95d1182295fdd30f75905446cf0091"
   }
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
11
meshifyDrivers/piflow/testRTS.py
Normal file
@@ -0,0 +1,11 @@
from runtimeStats import RuntimeStats as RTS


rts = RTS()
rts.loadDataFromFile("/Users/nico/Documents/test/runtimestats.json")
rts.startRun()
#rts.endRun()
rts.saveDataToFile("/Users/nico/Documents/test/runtimestats.json")
print(rts.runs)
63
meshifyDrivers/piflow/utilities.py
Normal file
@@ -0,0 +1,63 @@
"""Utility functions for the driver."""
|
||||
import socket
|
||||
import struct
|
||||
import urllib
|
||||
import contextlib
|
||||
def get_private_ip_address():
|
||||
"""Find the private IP Address of the host device."""
|
||||
try:
|
||||
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
|
||||
sock.connect(("8.8.8.8", 80))
|
||||
ip_address = sock.getsockname()[0]
|
||||
sock.close()
|
||||
except Exception as e:
|
||||
return e
|
||||
|
||||
return ip_address
|
||||
|
||||
def get_public_ip_address():
|
||||
ip_address = "0.0.0.0"
|
||||
try:
|
||||
with contextlib.closing(urllib.urlopen("http://checkip.amazonaws.com")) as url:
|
||||
ip_address = url.read()
|
||||
except Exception as e:
|
||||
print("Could not resolve address: {}".format(e))
|
||||
return ip_address
|
||||
return ip_address
|
||||
|
||||
|
||||
def int_to_float16(int_to_convert):
|
||||
"""Convert integer into float16 representation."""
|
||||
bin_rep = ('0' * 16 + '{0:b}'.format(int_to_convert))[-16:]
|
||||
sign = 1.0
|
||||
if int(bin_rep[0]) == 1:
|
||||
sign = -1.0
|
||||
exponent = float(int(bin_rep[1:6], 2))
|
||||
fraction = float(int(bin_rep[6:17], 2))
|
||||
|
||||
if exponent == float(0b00000):
|
||||
return sign * 2 ** -14 * fraction / (2.0 ** 10.0)
|
||||
elif exponent == float(0b11111):
|
||||
if fraction == 0:
|
||||
return sign * float("inf")
|
||||
return float("NaN")
|
||||
frac_part = 1.0 + fraction / (2.0 ** 10.0)
|
||||
return sign * (2 ** (exponent - 15)) * frac_part
|
||||
|
||||
|
||||
def ints_to_float(int1, int2):
|
||||
"""Convert 2 registers into a floating point number."""
|
||||
mypack = struct.pack('>HH', int1, int2)
|
||||
f_unpacked = struct.unpack('>f', mypack)
|
||||
print("[{}, {}] >> {}".format(int1, int2, f_unpacked[0]))
|
||||
return f_unpacked[0]
|
||||
|
||||
|
||||
def degf_to_degc(temp_f):
|
||||
"""Convert deg F to deg C."""
|
||||
return (temp_f - 32.0) * (5.0/9.0)
|
||||
|
||||
|
||||
def degc_to_degf(temp_c):
|
||||
"""Convert deg C to deg F."""
|
||||
return temp_c * 1.8 + 32.0
|
||||
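
A quick sanity check for the conversion helpers above (input values are illustrative; 0x3C00 is 1.0 in IEEE 754 half precision, and the register pair below decodes to roughly 123.45 as a 32-bit float):

# Illustrative checks; not part of the committed driver code.
from utilities import int_to_float16, ints_to_float, degf_to_degc

print(int_to_float16(0x3C00))          # -> 1.0
print(ints_to_float(0x42F6, 0xE666))   # two 16-bit registers -> ~123.45
print(degf_to_degc(212.0))             # -> 100.0
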
300
meshifyDrivers/plcfreshwater/Channel.py
Normal file
@@ -0,0 +1,300 @@
"""Define Meshify channel class."""
|
||||
import time
|
||||
import urllib
|
||||
from pycomm.ab_comm.clx import Driver as ClxDriver
|
||||
from pycomm.cip.cip_base import CommError, DataError
|
||||
from file_logger import filelogger as log
|
||||
|
||||
|
||||
|
||||
TAG_DATAERROR_SLEEPTIME = 5
|
||||
|
||||
def binarray(intval):
|
||||
"""Split an integer into its bits."""
|
||||
bin_string = '{0:08b}'.format(intval)
|
||||
bin_arr = [i for i in bin_string]
|
||||
bin_arr.reverse()
|
||||
return bin_arr
|
||||
|
||||
|
||||
def read_tag(addr, tag, plc_type="CLX"):
|
||||
"""Read a tag from the PLC."""
|
||||
direct = plc_type == "Micro800"
|
||||
clx = ClxDriver()
|
||||
try:
|
||||
if clx.open(addr, direct_connection=direct):
|
||||
try:
|
||||
val = clx.read_tag(tag)
|
||||
clx.close()
|
||||
return val
|
||||
except DataError as err:
|
||||
clx.close()
|
||||
time.sleep(TAG_DATAERROR_SLEEPTIME)
|
||||
log.error("Data Error during readTag({}, {}): {}".format(addr, tag, err))
|
||||
except CommError:
|
||||
# err = c.get_status()
|
||||
#clx.close()
|
||||
log.error("Could not connect during readTag({}, {})".format(addr, tag))
|
||||
except AttributeError as err:
|
||||
clx.close()
|
||||
log.error("AttributeError during readTag({}, {}): \n{}".format(addr, tag, err))
|
||||
clx.close()
|
||||
return False
|
||||
|
||||
|
||||
def read_array(addr, tag, start, end, plc_type="CLX"):
|
||||
"""Read an array from the PLC."""
|
||||
direct = plc_type == "Micro800"
|
||||
clx = ClxDriver()
|
||||
if clx.open(addr, direct_connection=direct):
|
||||
arr_vals = []
|
||||
try:
|
||||
for i in range(start, end):
|
||||
tag_w_index = tag + "[{}]".format(i)
|
||||
val = clx.read_tag(tag_w_index)
|
||||
arr_vals.append(round(val[0], 4))
|
||||
if arr_vals:
|
||||
clx.close()
|
||||
return arr_vals
|
||||
else:
|
||||
log.error("No length for {}".format(addr))
|
||||
clx.close()
|
||||
return False
|
||||
except Exception:
|
||||
log.error("Error during readArray({}, {}, {}, {})".format(addr, tag, start, end))
|
||||
err = clx.get_status()
|
||||
clx.close()
|
||||
log.error(err)
|
||||
clx.close()
|
||||
|
||||
|
||||
def write_tag(addr, tag, val, plc_type="CLX"):
|
||||
"""Write a tag value to the PLC."""
|
||||
direct = plc_type == "Micro800"
|
||||
clx = ClxDriver()
|
||||
try:
|
||||
if clx.open(addr, direct_connection=direct):
|
||||
try:
|
||||
initial_val = clx.read_tag(tag)
|
||||
write_status = clx.write_tag(tag, val, initial_val[1])
|
||||
clx.close()
|
||||
return write_status
|
||||
except DataError as err:
|
||||
clx_err = clx.get_status()
|
||||
clx.close()
|
||||
log.error("--\nDataError during writeTag({}, {}, {}, plc_type={}) -- {}\n{}\n".format(addr, tag, val, plc_type, err, clx_err))
|
||||
|
||||
except CommError as err:
|
||||
clx_err = clx.get_status()
|
||||
log.error("--\nCommError during write_tag({}, {}, {}, plc_type={})\n{}\n--".format(addr, tag, val, plc_type, err))
|
||||
#clx.close()
|
||||
return False
|
||||
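
A sketch of how read_tag and write_tag are typically called (the IP address and tag names are examples drawn from IP_TABLE and Tags.py elsewhere in this commit; pycomm's read_tag returns a (value, data_type) tuple, which is why val[0] and initial_val[1] are indexed above):

# Illustrative calls; address and tags are examples from this commit.
val = read_tag("192.168.1.201", "Scaled_Flow_Meter", plc_type="Micro800")
if val:
    print(val[0])  # val is (value, data_type); False means the read failed
write_tag("192.168.1.201", "SPT_Flow_Meter_Unit", 1, plc_type="Micro800")
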


class Channel(object):
    """Holds the configuration for a Meshify channel."""

    def __init__(self, mesh_name, data_type, chg_threshold, guarantee_sec, map_=False, write_enabled=False):
        """Initialize the channel."""
        self.mesh_name = mesh_name
        self.data_type = data_type
        self.last_value = None
        self.value = None
        self.last_send_time = 0
        self.chg_threshold = chg_threshold
        self.guarantee_sec = guarantee_sec
        self.map_ = map_
        self.write_enabled = write_enabled

    def __str__(self):
        """Create a string for the channel."""
        return "{}\nvalue: {}, last_send_time: {}".format(self.mesh_name, self.value, self.last_send_time)

    def check(self, new_value, force_send=False):
        """Check to see if the new_value needs to be stored."""
        send_needed = False
        send_reason = ""
        if new_value is None:
            new_value = self.value
        if self.data_type == 'BOOL' or self.data_type == 'STRING':
            if self.last_send_time == 0:
                send_needed = True
                send_reason = "no send time"
            elif self.value is None:
                send_needed = True
                send_reason = "no value"
            elif (time.time() - self.last_send_time) > self.guarantee_sec:
                send_needed = True
                send_reason = "guarantee sec"
            elif self.value != new_value:
                if self.map_:
                    if (not self.value == self.map_[new_value]) or force_send:
                        send_needed = True
                        send_reason = "value change"
                else:
                    send_needed = True
                    send_reason = "value change"
            elif force_send:
                send_needed = True
                send_reason = "forced"
        else:
            if self.last_send_time == 0:
                send_needed = True
                send_reason = "no send time"
            elif self.value is None:
                send_needed = True
                send_reason = "no value"
            elif abs(self.value - new_value) > self.chg_threshold:
                send_needed = True
                send_reason = "change threshold"
            elif (time.time() - self.last_send_time) > self.guarantee_sec:
                send_needed = True
                send_reason = "guarantee sec"
            elif force_send:
                send_needed = True
                send_reason = "forced"
        if send_needed:
            self.last_value = self.value
            if self.map_:
                try:
                    self.value = self.map_[new_value]
                except KeyError:
                    log.error("Cannot find a map value for {} in {} for {}".format(new_value, self.map_, self.mesh_name))
                    self.value = new_value
            else:
                self.value = new_value
            self.last_send_time = time.time()
            log.info("Sending {} for {} - {}".format(self.value, self.mesh_name, send_reason))
        return send_needed

    def read(self):
        """Read the value."""
        pass


def identity(sent):
    """Return exactly what was sent to it."""
    return sent


class ModbusChannel(Channel):
    """Modbus channel object."""

    def __init__(self, mesh_name, register_number, data_type, chg_threshold, guarantee_sec, channel_size=1, map_=False, write_enabled=False, transform_fn=identity):
        """Initialize the channel."""
        super(ModbusChannel, self).__init__(mesh_name, data_type, chg_threshold, guarantee_sec, map_, write_enabled)
        self.mesh_name = mesh_name
        self.register_number = register_number
        self.channel_size = channel_size
        self.data_type = data_type
        self.last_value = None
        self.value = None
        self.last_send_time = 0
        self.chg_threshold = chg_threshold
        self.guarantee_sec = guarantee_sec
        self.map_ = map_
        self.write_enabled = write_enabled
        self.transform_fn = transform_fn

    def read(self, mbsvalue):
        """Return the transformed read value."""
        return self.transform_fn(mbsvalue)


class PLCChannel(Channel):
    """PLC Channel Object."""

    def __init__(self, ip, mesh_name, plc_tag, data_type, chg_threshold, guarantee_sec, map_=False, write_enabled=False, plc_type='CLX'):
        """Initialize the channel."""
        super(PLCChannel, self).__init__(mesh_name, data_type, chg_threshold, guarantee_sec, map_, write_enabled)
        self.plc_ip = ip
        self.mesh_name = mesh_name
        self.plc_tag = plc_tag
        self.data_type = data_type
        self.last_value = None
        self.value = None
        self.last_send_time = 0
        self.chg_threshold = chg_threshold
        self.guarantee_sec = guarantee_sec
        self.map_ = map_
        self.write_enabled = write_enabled
        self.plc_type = plc_type

    def read(self):
        """Read the value."""
        plc_value = None
        if self.plc_tag and self.plc_ip:
            read_value = read_tag(self.plc_ip, self.plc_tag, plc_type=self.plc_type)
            if read_value:
                plc_value = read_value[0]

        return plc_value


class BoolArrayChannels(Channel):
    """Hold the configuration for a set of boolean array channels."""

    def __init__(self, ip, mesh_name, plc_tag, data_type, chg_threshold, guarantee_sec, map_=False, write_enabled=False):
        """Initialize the channel."""
        super(BoolArrayChannels, self).__init__(mesh_name, data_type, chg_threshold, guarantee_sec, map_, write_enabled)
        self.plc_ip = ip
        self.mesh_name = mesh_name
        self.plc_tag = plc_tag
        self.data_type = data_type
        self.last_value = None
        self.value = None
        self.last_send_time = 0
        self.chg_threshold = chg_threshold
        self.guarantee_sec = guarantee_sec
        self.map_ = map_
        self.write_enabled = write_enabled

    def compare_values(self, new_val_dict):
        """Compare new values to old values to see if the values need storing."""
        send = False
        for idx in new_val_dict:
            try:
                if new_val_dict[idx] != self.last_value[idx]:
                    send = True
            except KeyError:
                log.error("Key Error in self.compare_values for index {}".format(idx))
                send = True
        return send

    def read(self, force_send=False):
        """Read the value and check to see if needs to be stored."""
        send_needed = False
        send_reason = ""
        if self.plc_tag:
            val = read_tag(self.plc_ip, self.plc_tag)
            if val:
                bool_arr = binarray(val[0])
                new_val = {}
                for idx in self.map_:
                    try:
                        new_val[self.map_[idx]] = bool_arr[idx]
                    except KeyError:
                        log.error("Not able to get value for index {}".format(idx))

                if self.last_send_time == 0:
                    send_needed = True
                    send_reason = "no send time"
                elif self.value is None:
                    send_needed = True
                    send_reason = "no value"
                elif self.compare_values(new_val):
                    send_needed = True
                    send_reason = "value change"
                elif (time.time() - self.last_send_time) > self.guarantee_sec:
                    send_needed = True
                    send_reason = "guarantee sec"
                elif force_send:
                    send_needed = True
                    send_reason = "forced"

                if send_needed:
                    self.value = new_val
                    self.last_value = self.value
                    self.last_send_time = time.time()
                    log.info("Sending {} for {} - {}".format(self.value, self.mesh_name, send_reason))
        return send_needed
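
Channel.check() above implements report-by-exception: a reading is stored on first sight, when it moves by more than chg_threshold, or when guarantee_sec has elapsed since the last send. A minimal sketch (publish() is a hypothetical stand-in for the driver's send path; values are illustrative):

# Illustrative use of the report-by-exception logic in Channel.check().
ch = Channel("pump_pressure", "REAL", chg_threshold=3, guarantee_sec=3600)
if ch.check(42.7):   # first reading always sends ("no send time")
    publish(ch.mesh_name, ch.value)   # publish() is hypothetical
if ch.check(43.1):   # |delta| = 0.4 < 3, so no send until an hour passes
    publish(ch.mesh_name, ch.value)
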
33
meshifyDrivers/plcfreshwater/Tags.py
Normal file
@@ -0,0 +1,33 @@
from Channel import PLCChannel, ModbusChannel
from plcfreshwater import PLC_IP_ADDRESS

tags = [
    PLCChannel(PLC_IP_ADDRESS, "scaled_flow_meter", "Scaled_Flow_Meter", "REAL", 10, 3600, plc_type="Micro800"),
    PLCChannel(PLC_IP_ADDRESS, "scaled_pressure_transducer", "Scaled_Pressure_Transducer", "REAL", 3, 3600, plc_type="Micro800"),
    PLCChannel(PLC_IP_ADDRESS, "raw_hand_input", "Raw_Hand_Input", "BOOL", 1, 7200, plc_type="Micro800", map_={0: "Off", 1: "On", None: "N/A"}),
    PLCChannel(PLC_IP_ADDRESS, "raw_auto_input", "Raw_Auto_Input", "BOOL", 1, 7200, plc_type="Micro800", map_={0: "Off", 1: "On", None: "N/A"}),
    PLCChannel(PLC_IP_ADDRESS, "raw_run_status", "Raw_Run_Status", "BOOL", 1, 3600, plc_type="Micro800", map_={0: "Stopped", 1: "Running", None: "N/A"}),
    PLCChannel(PLC_IP_ADDRESS, "raw_local_start", "Raw_Local_Start", "BOOL", 1, 3600, plc_type="Micro800"),
    PLCChannel(PLC_IP_ADDRESS, "lifetime_flow_meter_gal", "Lifetime_Flow_Meter_Gal", "REAL", 1000, 3600, plc_type="Micro800"),
    PLCChannel(PLC_IP_ADDRESS, "spt_flow_meter_unit", "SPT_Flow_Meter_Unit", "BOOL", 0, 3600, plc_type="Micro800"),
    PLCChannel(PLC_IP_ADDRESS, "raw_overload_status", "Raw_Overload_Status", "BOOL", 0, 3600, plc_type="Micro800", map_={0: "Good", 1: "Down on Overload Tripped", None: "N/A"})
]

tags_totalizer = [
    PLCChannel(PLC_IP_ADDRESS, "scaled_flow_meter", "Scaled_Flow_Meter", "REAL", 10, 3600, plc_type="Micro800"),
    PLCChannel(PLC_IP_ADDRESS, "scaled_pressure_transducer", "Scaled_Pressure_Transducer", "REAL", 3, 3600, plc_type="Micro800"),
    PLCChannel(PLC_IP_ADDRESS, "raw_hand_input", "Raw_Hand_Input", "BOOL", 1, 7200, plc_type="Micro800", map_={0: "Off", 1: "On", None: "N/A"}),
    PLCChannel(PLC_IP_ADDRESS, "raw_auto_input", "Raw_Auto_Input", "BOOL", 1, 7200, plc_type="Micro800", map_={0: "Off", 1: "On", None: "N/A"}),
    PLCChannel(PLC_IP_ADDRESS, "raw_run_status", "Raw_Run_Status", "BOOL", 1, 3600, plc_type="Micro800", map_={0: "Stopped", 1: "Running", None: "N/A"}),
    PLCChannel(PLC_IP_ADDRESS, "raw_local_start", "Raw_Local_Start", "BOOL", 1, 3600, plc_type="Micro800"),
    PLCChannel(PLC_IP_ADDRESS, "total_fm_yesterday_gal", "Totalizer_FM_Yesterday_Total_Gal", "REAL", 1, 3600, plc_type="Micro800"),
    PLCChannel(PLC_IP_ADDRESS, "total_fm_day_gal", "Totalizer_FM_Current_Day_Total_Gal", "REAL", 1, 3600, plc_type="Micro800"),
    PLCChannel(PLC_IP_ADDRESS, "total_fm_last_month_gal", "Totalizer_FM_Last_Month_Gal", "REAL", 1, 3600, plc_type="Micro800"),
    PLCChannel(PLC_IP_ADDRESS, "total_fm_month_gal", "Totalizer_FM_Current_Month_Gal", "REAL", 1, 3600, plc_type="Micro800"),
    PLCChannel(PLC_IP_ADDRESS, "total_fm_yesterday_bbls", "Totalizer_FM_Yesterday_Total_BBLs", "REAL", 1, 3600, plc_type="Micro800"),
    PLCChannel(PLC_IP_ADDRESS, "total_fm_day_bbls", "Totalizer_FM_Current_Day_Total_BBLs", "REAL", 1, 3600, plc_type="Micro800"),
    PLCChannel(PLC_IP_ADDRESS, "total_fm_last_month_bbls", "Totalizer_FM_Last_Month_BBLs", "REAL", 1, 3600, plc_type="Micro800"),
    PLCChannel(PLC_IP_ADDRESS, "total_fm_month_bbls", "Totalizer_FM_Current_Month_BBLs", "REAL", 1, 3600, plc_type="Micro800"),
    PLCChannel(PLC_IP_ADDRESS, "spt_flow_meter_unit", "SPT_Flow_Meter_Unit", "BOOL", 0, 3600, plc_type="Micro800"),
    PLCChannel(PLC_IP_ADDRESS, "raw_overload_status", "Raw_Overload_Status", "BOOL", 0, 3600, plc_type="Micro800", map_={0: "Good", 1: "Down on Overload Tripped", None: "N/A"})
]
14
meshifyDrivers/plcfreshwater/config.txt
Normal file
@@ -0,0 +1,14 @@
{
    "files": {
        "file3": "file_logger.py",
        "file2": "Channel.py",
        "file1": "plcfreshwater.py",
        "file6": "persistence.py",
        "file5": "utilities.py",
        "file4": "Tags.py"
    },
    "deviceName": "plcfreshwater",
    "releaseVersion": "16",
    "driverFileName": "plcfreshwater.py",
    "driverId": "0100"
}
360
meshifyDrivers/plcfreshwater/device_base.py
Normal file
@@ -0,0 +1,360 @@
import types
import traceback
import binascii
import threading
import time
import thread
import os
import struct
import sys
import textwrap
import Queue
import json


class deviceBase():

    def __init__(self, name=None, number=None, mac=None, Q=None, mcu=None, companyId=None, offset=None, mqtt=None, Nodes=None):
        self.offset = offset
        self.company = companyId
        self.name = name
        self.number = number
        self.q = Q
        self.deviceName = name + '_[' + mac + ':' + number[0:2] + ':' + number[2:] + ']!'
        self.chName = "M1" + '_[' + mac + ':'
        self.chName2 = '_[' + mac + ':'
        print 'device name is:'
        print self.deviceName
        mac2 = mac.replace(":", "")
        self.mac = mac2.upper()
        self.address = 1
        self.debug = True
        self.mcu = mcu
        self.firstRun = True
        self.mqtt = mqtt
        self.nodes = Nodes
        # local dictionary of derived nodes ex: localNodes[tank_0199] = self
        self.localNodes = {}
        os.system("chmod 777 /root/reboot")
        os.system("echo nameserver 8.8.8.8 > /etc/resolv.conf")
        # Queue for incoming sets
        self.loraQ = Queue.Queue()

        self.knownIDs = []
        thread.start_new_thread(self.getSetsThread, ())

    def getSetsThread(self):

        while True:
            try:
                item = self.loraQ.get(block=True, timeout=600)
                try:
                    print "here is the item from the sets q"
                    print item
                    if len(item) == 2:
                        techname = str(json.loads(item[1])[0]['payload']['name'].split(".")[0])
                        channel = str(json.loads(item[1])[0]['payload']['name'].split(".")[1])
                        name = techname.split("_")[0]
                        id = techname.split("_")[1][1:-2].replace(":", "").upper()
                        value = json.loads(item[1])[0]['payload']['value']
                        msgId = json.loads(item[1])[0]['msgId']

                        print channel, value, id, name, msgId
                        success = self.specificSets(channel, value, id, name)

                        if success == True:
                            print "SUCCESS ON SET"
                            if int(msgId) == 0:
                                return
                            lc = self.getTime()

                            value = str(self.mac) + " Success Setting: " + channel + " To: " + value
                            msg = """[ { "value":"%s", "timestamp":"%s", "msgId":"%s" } ]""" % (value, str(lc), msgId)
                            print value
                            print msg
                            topic = "meshify/responses/" + str(msgId)
                            print topic
                            self.q.put([topic, str(msg), 2])

                        else:

                            lc = self.getTime()
                            if success == False:
                                reason = "(Internal Gateway/Device Error)"
                            else:
                                reason = success
                            value = str(self.mac) + " Failed Setting: " + channel + " To: " + value + " " + reason
                            msg = """[ { "value":"%s", "timestamp":"%s", "msgId":"%s" } ]""" % (value, str(lc), msgId)
                            topic = "meshify/responses/" + msgId
                            self.q.put([topic, str(msg), 2])

                except:
                    if int(msgId) == 0:
                        return
                    lc = self.getTime()
                    value = str(self.mac) + " Failed Setting: " + channel + " To: " + value + " (No Callback Found)"
                    msg = """[ { "value":"%s", "timestamp":"%s", "msgId":"%s" } ]""" % (value, str(lc), msgId)
                    topic = "meshify/responses/" + msgId
                    self.q.put([topic, str(msg), 2])
                    print 'no Set callback found for channel: ' + channel

            except:
                print "sets queue timeout, restarting..."

    def sendtodbDevLora(self, id, channel, value, timestamp, deviceName):

        mac = self.mac

        if deviceName == "mainMeshify":
            zigmac = "_[01:00:00:00:00:" + id[0:2] + ":" + id[2:4] + ":" + id[4:6] + "]!"
        else:
            zigmac = "_[00:00:00:00:00:" + id[0:2] + ":" + id[2:4] + ":" + id[4:6] + "]!"
        dname = deviceName + zigmac

        # define dname, make id into techname and mac
        if id not in self.knownIDs:
            self.knownIDs.append(id)
            self.mcu.xbees[dname] = self.loraQ

        # meshify/db/330/C493000354FB/ilora/c493000354fb2A6E/a1-v
        # [ { "value":"0.5635", "timestamp":"1486039316" } ]

        if int(timestamp) == 0:
            timestamp = self.getTime()

        topic = 'meshify/db/%s/%s/%s/%s' % (self.company, mac, dname, channel)
        print topic
        msg = """[ { "value":"%s", "timestamp":"%s" } ]""" % (str(value), str(timestamp))
        print msg
        self.q.put([topic, msg, 0])

    def sendtodbLocLora(self, id, channel, value, timestamp, deviceName):

        mac = id
        while len(mac) < 12:
            mac = "0" + mac
        if deviceName == "mainMeshify":
            zigmac = "_[01:00:00:00:00:" + id[0:2] + ":" + id[2:4] + ":" + id[4:6] + "]!"
        else:
            zigmac = "_[00:00:00:00:00:" + id[0:2] + ":" + id[2:4] + ":" + id[4:6] + "]!"
        dname = deviceName + zigmac

        # define dname, make id into techname and mac
        if id not in self.knownIDs:
            self.knownIDs.append(id)
            topic = str(("meshify/sets/" + str(self.company) + "/" + mac + "/#"))
            self.mqtt.subscribe(topic, 0)
            topic = str(("meshify/sets/" + "1" + "/" + mac + "/#"))
            self.mqtt.subscribe(topic, 0)
            self.mcu.xbees[dname] = self.loraQ

        # meshify/db/330/C493000354FB/ilora/c493000354fb2A6E/a1-v
        # [ { "value":"0.5635", "timestamp":"1486039316" } ]

        if int(timestamp) == 0:
            timestamp = self.getTime()

        topic = 'meshify/db/%s/%s/%s/%s' % (self.company, mac, dname, channel)
        print topic
        msg = """[ { "value":"%s", "timestamp":"%s" } ]""" % (str(value), str(timestamp))
        print msg
        self.q.put([topic, msg, 0])

    def sendtodbLocLoraCom(self, id, channel, value, timestamp, deviceName):

        mac = "1" + id
        while len(mac) < 12:
            mac = "0" + mac

        if deviceName == "mainMeshify":
            zigmac = "_[01:00:00:00:00:" + id[0:2] + ":" + id[2:4] + ":" + id[4:6] + "]!"
        else:
            zigmac = "_[00:00:00:00:01:" + id[0:2] + ":" + id[2:4] + ":" + id[4:6] + "]!"
        dname = deviceName + zigmac

        # define dname, make id into techname and mac
        if id not in self.knownIDs:
            self.knownIDs.append(id)
            topic = str(("meshify/sets/" + str(self.company) + "/" + mac + "/#"))
            self.mqtt.subscribe(topic, 0)
            topic = str(("meshify/sets/" + "1" + "/" + mac + "/#"))
            self.mqtt.subscribe(topic, 0)
            self.mcu.xbees[dname] = self.loraQ

        # meshify/db/330/C493000354FB/ilora/c493000354fb2A6E/a1-v
        # [ { "value":"0.5635", "timestamp":"1486039316" } ]

        if int(timestamp) == 0:
            timestamp = self.getTime()

        topic = 'meshify/db/%s/%s/%s/%s' % (self.company, mac, dname, channel)
        print topic
        msg = """[ { "value":"%s", "timestamp":"%s" } ]""" % (str(value), str(timestamp))
        print msg
        self.q.put([topic, msg, 0])

    def sendtodbLoc(self, ch, channel, value, timestamp, deviceName, mac):

        # this will add your derived nodes to the master nodes list, allowing them to receive sets!!
        localNodesName = deviceName + "_" + str(ch) + "99"

        if not self.localNodes.has_key(localNodesName):
            self.localNodes[localNodesName] = True
            self.nodes[localNodesName] = self

        # make the techname
        lst = textwrap.wrap(str(mac), width=2)
        tech = ""
        for i in range(len(lst)):
            tech += lst[i].lower() + ":"

        chName2 = '_[' + tech

        if int(ch) < 10:
            ch = "0" + str(int(ch))

        if len(ch) > 2:
            ch = ch[:-2]

        dname = deviceName + chName2 + str(ch) + ":98]!"

        if int(timestamp) == 0:
            timestamp = self.getTime()

        topic = 'meshify/db/%s/%s/%s/%s' % (self.company, mac, dname, channel)
        print topic
        msg = """[ { "value":"%s", "timestamp":"%s" } ]""" % (str(value), str(timestamp))
        print msg
        self.q.put([topic, msg, 0])

    def sendtodbDevJSON(self, ch, channel, value, timestamp, deviceName):

        if int(ch) < 10:
            ch = "0" + str(int(ch))
        dname = deviceName + self.chName2 + str(ch) + ":99]!"
        if int(timestamp) == 0:
            timestamp = self.getTime()

        topic = 'meshify/db/%s/%s/%s/%s' % (self.company, self.mac, dname, channel)
        print topic
        msg = """[ { "value":%s, "timestamp":"%s" } ]""" % (str(value), str(timestamp))
        print msg
        self.q.put([topic, msg, 0])

    def sendtodbLora(self, ch, channel, value, timestamp, deviceName):

        if ":" not in ch:
            ch = ch[0:2] + ":" + ch[2:4]

        # this will add your derived nodes to the master nodes list, allowing them to receive sets!!
        localNodesName = deviceName + "_" + str(ch).replace(':', "")

        if not self.localNodes.has_key(localNodesName):
            self.localNodes[localNodesName] = True
            self.nodes[localNodesName] = self

        dname = deviceName + self.chName2 + str(ch) + "]!"

        if int(timestamp) == 0:
            timestamp = self.getTime()

        topic = 'meshify/db/%s/%s/%s/%s' % (self.company, self.mac, dname, channel)
        print topic
        msg = """[ { "value":"%s", "timestamp":"%s" } ]""" % (str(value), str(timestamp))
        print msg
        self.q.put([topic, msg, 0])

    def sendtodbDev(self, ch, channel, value, timestamp, deviceName):

        # this will add your derived nodes to the master nodes list, allowing them to receive sets!!
        localNodesName = deviceName + "_" + str(ch) + "99"

        if not self.localNodes.has_key(localNodesName):
            self.localNodes[localNodesName] = True
            self.nodes[localNodesName] = self

        if int(ch) < 10:
            ch = "0" + str(int(ch))

        dname = deviceName + self.chName2 + str(ch) + ":99]!"

        if int(timestamp) == 0:
            timestamp = self.getTime()

        topic = 'meshify/db/%s/%s/%s/%s' % (self.company, self.mac, dname, channel)
        print topic
        msg = """[ { "value":"%s", "timestamp":"%s" } ]""" % (str(value), str(timestamp))
        print msg
        self.q.put([topic, msg, 0])

    def sendtodbCH(self, ch, channel, value, timestamp):

        if int(ch) < 10:
            ch = "0" + str(ch)

        dname = self.chName + str(ch) + ":99]!"

        if int(timestamp) == 0:
            timestamp = self.getTime()

        topic = 'meshify/db/%s/%s/%s/%s' % (self.company, self.mac, dname, channel)
        print topic
        msg = """[ { "value":"%s", "timestamp":"%s" } ]""" % (str(value), str(timestamp))
        print msg
        self.q.put([topic, msg, 0])

    def sendtodb(self, channel, value, timestamp):

        if int(timestamp) == 0:
            timestamp = self.getTime()
        if timestamp < 1400499858:
            return
        else:
            timestamp = str(int(timestamp) + int(self.offset))

        topic = 'meshify/db/%s/%s/%s/%s' % (self.company, self.mac, self.deviceName, channel)
        print topic
        msg = """[ { "value":"%s", "timestamp":"%s" } ]""" % (str(value), str(timestamp))
        print msg
        self.q.put([topic, msg, 0])

    def sendtodbJSON(self, channel, value, timestamp):

        if int(timestamp) == 0:
            timestamp = self.getTime()
        if timestamp < 1400499858:
            return
        else:
            timestamp = str(int(timestamp) + int(self.offset))

        topic = 'meshify/db/%s/%s/%s/%s' % (self.company, self.mac, self.deviceName, channel)
        print topic
        msg = """[ { "value":%s, "timestamp":"%s" } ]""" % (str(value), str(timestamp))
        print msg
        self.q.put([topic, msg, 0])

    def getTime(self):
        return str(int(time.time() + int(self.offset)))
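
All of the sendtodb* helpers above publish to topics of the form meshify/db/<companyId>/<mac>/<deviceName>/<channel> with a one-element JSON array as the payload. A sketch of one resulting message, with values adapted from the in-code comment (the device-name segment varies by helper, so treat this as illustrative, not a captured message):

# Illustrative topic/payload produced by a sendtodb-style helper.
topic = 'meshify/db/330/C493000354FB/ilora_[c4:93:00:03:54:fb:2A:6E]!/a1-v'
msg = """[ { "value":"0.5635", "timestamp":"1486039316" } ]"""
self.q.put([topic, msg, 0])  # inside a deviceBase method; 0 is the MQTT QoS
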
19
meshifyDrivers/plcfreshwater/file_logger.py
Normal file
@@ -0,0 +1,19 @@
"""Logging setup for plcfreshwater"""
|
||||
import logging
|
||||
from logging.handlers import RotatingFileHandler
|
||||
import sys
|
||||
from plcfreshwater import TOPIC_MAC
|
||||
|
||||
log_formatter = logging.Formatter('%(asctime)s %(levelname)s %(funcName)s(%(lineno)d) %(message)s')
|
||||
log_file = './plcfreshwater_{}.log'.format(TOPIC_MAC)
|
||||
my_handler = RotatingFileHandler(log_file, mode='a', maxBytes=500*1024,
|
||||
backupCount=2, encoding=None, delay=0)
|
||||
my_handler.setFormatter(log_formatter)
|
||||
my_handler.setLevel(logging.INFO)
|
||||
filelogger = logging.getLogger('plcfreshwater')
|
||||
filelogger.setLevel(logging.INFO)
|
||||
filelogger.addHandler(my_handler)
|
||||
|
||||
console_out = logging.StreamHandler(sys.stdout)
|
||||
console_out.setFormatter(log_formatter)
|
||||
filelogger.addHandler(console_out)
|
||||
21
meshifyDrivers/plcfreshwater/persistence.py
Normal file
@@ -0,0 +1,21 @@
"""Data persistance functions."""
|
||||
# if more advanced persistence is needed, use a sqlite database
|
||||
import json
|
||||
|
||||
|
||||
def load(filename="persist.json"):
|
||||
"""Load persisted settings from the specified file."""
|
||||
try:
|
||||
with open(filename, 'r') as persist_file:
|
||||
return json.load(persist_file)
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
|
||||
def store(persist_obj, filename="persist.json"):
|
||||
"""Store the persisting settings into the specified file."""
|
||||
try:
|
||||
with open(filename, 'w') as persist_file:
|
||||
return json.dump(persist_obj, persist_file, indent=4)
|
||||
except Exception:
|
||||
return False
|
||||
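
A round-trip sketch for the persistence helpers above, mirroring how plcfreshwater.py (next file) bootstraps its IP table; the default value shown is illustrative:

# Illustrative round trip; load() returns False when the file is absent.
import persistence

settings = persistence.load("persist.json")
if not settings:
    settings = {"000000000001": "192.168.1.201"}  # example default
    persistence.store(settings, "persist.json")
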
500
meshifyDrivers/plcfreshwater/plcfreshwater.py
Normal file
@@ -0,0 +1,500 @@
"""Driver for plcfreshwater"""
|
||||
import logging
|
||||
import threading
|
||||
import json
|
||||
import time
|
||||
from random import randint
|
||||
import os
|
||||
from device_base import deviceBase
|
||||
import persistence
|
||||
from utilities import get_public_ip_address, get_private_ip_address
|
||||
from datetime import datetime as dt
|
||||
_ = None
|
||||
|
||||
# GLOBAL VARIABLES
|
||||
WAIT_FOR_CONNECTION_SECONDS = 10
|
||||
IP_CHECK_PERIOD = 60
|
||||
PLC_IP_ADDRESS = ""
|
||||
TOPIC_MAC = ""
|
||||
|
||||
|
||||
# PERSISTENCE FILE
|
||||
IP_TABLE = persistence.load('persist.json')
|
||||
if not IP_TABLE:
|
||||
IP_TABLE = {
|
||||
"000000000001":"192.168.1.201",
|
||||
"000000000002":"192.168.1.202",
|
||||
"000000000003":"192.168.1.203",
|
||||
"000000000004":"192.168.1.211",
|
||||
"000000000005":"192.168.1.210",
|
||||
"000000000006":"192.168.1.208",
|
||||
"000000000007":"192.168.1.209",
|
||||
"000000000008":"192.168.1.208",
|
||||
"000000000009":"192.168.1.209",
|
||||
"000000000010":"192.168.1.210",
|
||||
"000000000011":"192.168.1.211",
|
||||
"000000000012":"192.168.1.212",
|
||||
"000000000013":"192.168.1.213",
|
||||
"000000000014":"192.168.1.214",
|
||||
"000000000015":"192.168.1.215",
|
||||
"000000000016":"192.168.1.216"
|
||||
}
|
||||
persistence.store(IP_TABLE, 'persist.json')
|
||||
|
||||
|
||||
class start(threading.Thread, deviceBase):
    """Start class required by Meshify."""

    def __init__(self, name=None, number=None, mac=None, Q=None, mcu=None,
                 companyId=None, offset=None, mqtt=None, Nodes=None):
        """Initialize the driver."""
        threading.Thread.__init__(self)
        deviceBase.__init__(self, name=name, number=number, mac=mac, Q=Q,
                            mcu=mcu, companyId=companyId, offset=offset,
                            mqtt=mqtt, Nodes=Nodes)

        self.daemon = True
        self.version = "16"
        self.finished = threading.Event()
        self.force_send = False
        self.public_ip_address = ""
        self.public_ip_address_last_checked = 0
        self.private_ip_address = ""
        self.plcip = ""
        self.ping_counter = 0
        self.plc_ping_status = 'Default'
        self.flowing = False
        self.totalizing = False
        self.totals_counter = 0
        threading.Thread.start(self)

    # this is a required function for all drivers; its goal is to upload some piece of data
    # about your device so it can be seen on the web
    def register(self):
        """Register the driver."""
        # self.sendtodb("log", "BOOM! Booted.", 0)
        pass

    def run(self):
        """Actually run the driver."""
        for i in range(0, WAIT_FOR_CONNECTION_SECONDS):
            print("plcfreshwater driver will start in {} seconds".format(WAIT_FOR_CONNECTION_SECONDS - i))
            time.sleep(1)

        global TOPIC_MAC, PLC_IP_ADDRESS, log, write_tag, read_tag, PERSIST

        TOPIC_MAC = self.mac

        from file_logger import filelogger as log
        log.info("plcfreshwater startup")
        log.info("BOOM! Starting plcfreshwater driver...")

        self._check_ip_address()

        self.nodes["plcfreshwater_0199"] = self

        send_loops = 0

        PLC_IP_ADDRESS = IP_TABLE[self.mac]
        self.plcip = PLC_IP_ADDRESS
        log.info("PLC IP is {}".format(self.plcip))
        self.sendtodbDev(1, 'plc_ip_address', self.plcip, 0, 'plcfreshwater')
        PERSIST = persistence.load('totalizers_{}.json'.format(self.mac))
        if not PERSIST:
            PERSIST = {
                'Todays': 0,
                'Yesterdays': 0,
                'Current Months': 0,
                'Previous Months': 0,
                'Monthly Holding': 0,
                'Daily Holding': 0,
                'Lifetime': 0,
                'Day': 0,
                'Month': 0,
                'Last Report': 0,
                'Totalizers': False
            }
            persistence.store(PERSIST, 'totalizers_{}.json'.format(self.mac))
        from Channel import PLCChannel, ModbusChannel, read_tag, write_tag, TAG_DATAERROR_SLEEPTIME
        from Tags import tags, tags_totalizer
        if PERSIST["Totalizers"]:
            CHANNELS = tags_totalizer
        else:
            CHANNELS = tags
        while True:
            now = time.time()
            if self.force_send:
                log.warning("FORCE SEND: TRUE")
            if int(time.time()) % 600 == 0 or self.force_send:
                if self.force_send:
                    payload = {"ts": time.time()*1000, "values": {}}
                else:
                    payload = {"ts": round(time.time()/600)*600*1000, "values": {}}
                resetPayload = {"ts": "", "values": {}}
                dayReset, weekReset, monthReset, yearReset = False, False, False, False
                for chan in CHANNELS:
                    try:
                        val = chan.read()
                        if chan.mesh_name == "lifetime_flow_meter_gal":
                            payload["values"]["day_volume"], dayReset = self.totalizeDay(val)
                            # payload["values"]["week_volume"], weekReset = self.totalizeWeek(val)
                            payload["values"]["month_volume"], monthReset = self.totalizeMonth(val)
                            # payload["values"]["year_volume"], yearReset = self.totalizeYear(val)
                        else:
                            if chan.mesh_name == "scaled_flow_meter":
                                if val > 0:
                                    self.flowing = True
                                else:
                                    self.flowing = False
                            payload["values"][chan.mesh_name] = val
                        # time.sleep(TAG_DATAERROR_SLEEPTIME)  # sleep to allow Micro800 to handle ENET requests
                    except Exception as e:
                        log.error("Something went wrong in read: {}".format(e))
                self.check_totals_reset(self.flowing, self.totalizing)
                # print("plcfreshwater driver still alive...")
                plc_ping = 1  # default to "failed" so plc_ping is defined even if os.system raises
                try:
                    plc_ping = os.system("ping -c 1 " + IP_TABLE[self.mac] + " > /dev/null 2>&1")
                except Exception as e:
                    log.error("something went wrong in ping: {}".format(e))
                if plc_ping == 0:
                    if not self.plc_ping_status == "OK":
                        payload["values"]["plc_ping"] = "OK"
                        self.plc_ping_status = "OK"
                else:
                    if not self.plc_ping_status == "Comms Error to PLC":
                        payload["values"]["plc_ping"] = "Comms Error to PLC"
                        self.plc_ping_status = 'Comms Error to PLC'

                self.sendToTB(json.dumps(payload))
                self.sendToTBAttributes(json.dumps({"latestReportTime": round(time.time()/600)*600*1000}))
                if dayReset:
                    resetPayload["values"]["yesterday_volume"] = payload["values"]["day_volume"]
                    resetPayload["values"]["day_volume"] = 0
                if weekReset:
                    resetPayload["values"]["last_week_volume"] = payload["values"]["week_volume"]
                    resetPayload["values"]["week_volume"] = 0
                if monthReset:
                    resetPayload["values"]["last_month_volume"] = payload["values"]["month_volume"]
                    resetPayload["values"]["month_volume"] = 0
                if yearReset:
                    resetPayload["values"]["last_year_volume"] = payload["values"]["year_volume"]
                    resetPayload["values"]["year_volume"] = 0

                if resetPayload["values"]:
                    # stamp resets 1 ms after the regular report so they land in the next bucket
                    resetPayload["ts"] = 1 + round(time.time()/600)*600*1000
                    self.sendToTB(json.dumps(resetPayload))

            time.sleep(10)
            if self.force_send:
                if send_loops > 2:
                    log.warning("Turning off force_send")
                    self.force_send = False
                    send_loops = 0
                else:
                    send_loops += 1

            if (now - self.public_ip_address_last_checked) > IP_CHECK_PERIOD:
                self._check_ip_address()

    def _check_ip_address(self):
        """Check the public IP address and send to Meshify if changed."""
        self.public_ip_address_last_checked = time.time()
        test_public_ip = get_public_ip_address()
        test_private_ip = get_private_ip_address()
        if not test_public_ip == self.public_ip_address and not test_public_ip == "0.0.0.0":
            self.sendtodbDev(1, 'public_ip_address', test_public_ip, 0, 'plcfreshwater')
            self.public_ip_address = test_public_ip
        if not test_private_ip == self.private_ip_address:
            self.sendtodbDev(1, 'private_ip_address', test_private_ip, 0, 'plcfreshwater')
            self.private_ip_address = test_private_ip
        hostname = "8.8.8.8"
        response = 1
        try:
            response = os.system("ping -c 1 " + hostname + " > /dev/null 2>&1")
        except Exception as e:
            print("Something went wrong in ping: {}".format(e))

        # and then check the response...
        if response == 0:
            print(hostname, 'is up!')
            self.ping_counter = 0
        else:
            print(hostname, 'is down!')
            self.ping_counter += 1

        if self.ping_counter >= 3:
            print("Rebooting because no internet detected")
            os.system('reboot')

    def check_totals_reset(self, flowing, totalizing):
        """If flow is seen on three consecutive checks without the totalizer moving, resync it."""
        if flowing and not totalizing:
            self.totals_counter = self.totals_counter + 1
        else:
            self.totals_counter = 0
        if self.totals_counter >= 3:
            log.info("Flow detected without totalizer movement; running fix_totals")
            self.fix_totals()
            self.totals_counter = 0

    def fix_totals(self):
        """Rebase the PLC lifetime totalizer and the persisted holdings after a resync."""
        Daily_Holding = PERSIST["Daily Holding"] - PERSIST["Monthly Holding"]
        new_lifetime = PERSIST["Lifetime"] - PERSIST["Monthly Holding"]
        resp = write_tag(self.plcip, "Lifetime_Flow_Meter_Gal", new_lifetime, plc_type="Micro800")
        if resp == True:
            PERSIST["Daily Holding"] = Daily_Holding
            PERSIST["Monthly Holding"] = 0.0
            PERSIST["Lifetime"] = new_lifetime
            persistence.store(PERSIST, 'totalizers_{}.json'.format(self.mac))
            log.info("RESETTING TOTALIZERS!!!")

    def plcfreshwater_sync(self, name, value):
        """Sync all data from the driver."""
        self.force_send = True
        # self.sendtodb("log", "synced", 0)
        return True

    def plcfreshwater_writeplctag(self, name, value):
        """Write a value to the PLC."""
        from Channel import write_tag
        new_val = json.loads(str(value).replace("'", '"'))
        tag_n = str(new_val['tag'])  # "cmd_Start"
        val_n = new_val['val']
        write_res = write_tag(str(PLC_IP_ADDRESS), tag_n, val_n, plc_type="Micro800")
        print("Result of plcfreshwater_writeplctag(self, {}, {}) = {}".format(name, value, write_res))
        if write_res is None:
            write_res = "Error writing to PLC..."
        return write_res

    def convertPersist(self, original_json):
        """Convert the legacy persist layout to the new totalizer key names."""
        # Mapping of keys; legacy keys mapped to None are dropped
        key_mapping = {
            'Todays': None,
            'Yesterdays': None,
            'Current Months': None,
            'Previous Months': None,
            'Monthly Holding': 'monthHolding',
            'Daily Holding': 'dayHolding',
            'Lifetime': 'lifetime',
            'Day': 'day',
            'Month': 'month',
            'Last Report': None,
            'Totalizers': 'Totalizers'
        }
        now = round(time.time()/600)*600*1000
        # Convert keys and build the new dict
        new_json = {}
        for original_key, new_key in key_mapping.items():
            if new_key:
                new_json[new_key] = original_json[original_key]
        new_json["year"] = time.gmtime(now/1000.0).tm_year
        # new_json_str = json.dumps(new_json, indent=4, sort_keys=True)
        return new_json

    def saveTotalizers(self, totalizers):
        """Persist the totalizer state to disk."""
        try:
            with open("/root/python_firmware/totalizers.json", "w") as t:
                json.dump(totalizers, t)
        except Exception as e:
            log.error(e)

    def get_totalizers(self):
        """Load the totalizer state, falling back to zeroed defaults."""
        default_totalizers = {
            "day": 0,
            "week": 0,
            "month": 0,
            "year": 0,
            "lifetime": 0,
            "dayHolding": 0,
            "weekHolding": 0,
            "monthHolding": 0,
            "yearHolding": 0
        }
        saveFile = "/root/python_firmware/totalizers.json"
        # Check if the state file exists.
        if not os.path.exists(saveFile):
            return dict(default_totalizers)
        try:
            with open(saveFile, "r") as t:
                totalizers = json.load(t)
            if not totalizers:
                log.info("-----INITIALIZING TOTALIZERS-----")
                totalizers = dict(default_totalizers)
        except Exception:
            totalizers = dict(default_totalizers)
        return totalizers

    def totalizeDay(self, lifetime):
        """Return (volume since midnight UTC, whether the day just rolled over)."""
        totalizers = self.get_totalizers()
        if "Daily Holding" in totalizers.keys():
            totalizers = self.convertPersist(totalizers)
        now = round(time.time()/600)*600*1000
        reset = False
        value = lifetime - totalizers["dayHolding"]
        # tm_mday is the day-of-month field (the original referenced a nonexistent tm_day)
        if not int(time.gmtime(now/1000.0).tm_mday) == int(totalizers["day"]):
            totalizers["dayHolding"] = lifetime
            totalizers["day"] = int(time.gmtime(now/1000.0).tm_mday)
            self.saveTotalizers(totalizers)
            reset = True
        return (value, reset)

    def totalizeWeek(self, lifetime):
        """Return (volume since Sunday, whether the week just rolled over)."""
        totalizers = self.get_totalizers()
        now = round(time.time()/600)*600*1000
        now_struct = time.gmtime(now/1000.0)  # now is in ms; convert before formatting
        reset = False
        value = lifetime - totalizers["weekHolding"]
        week = time.strftime("%U", now_struct)
        weekday = time.strftime("%a", now_struct)
        if (not week == totalizers["week"] and weekday == "Sun") or totalizers["week"] == 0:
            totalizers["weekHolding"] = lifetime
            totalizers["week"] = week
            self.saveTotalizers(totalizers)
            reset = True
        return (value, reset)

    def totalizeMonth(self, lifetime):
        """Return (volume this month, whether the month just rolled over)."""
        totalizers = self.get_totalizers()
        if "Month Holding" in totalizers.keys():
            totalizers = self.convertPersist(totalizers)
        now = round(time.time()/600)*600*1000
        reset = False
        value = lifetime - totalizers["monthHolding"]
        if not int(time.gmtime(now/1000.0).tm_mon) == int(totalizers["month"]):
            totalizers["monthHolding"] = lifetime
            totalizers["month"] = int(time.gmtime(now/1000.0).tm_mon)
            self.saveTotalizers(totalizers)
            reset = True
        return (value, reset)

    def totalizeYear(self, lifetime):
        """Return (volume this year, whether the year just rolled over)."""
        totalizers = self.get_totalizers()
        now = round(time.time()/600)*600*1000
        reset = False
        value = lifetime - totalizers["yearHolding"]
        if not int(time.gmtime(now/1000.0).tm_year) == int(totalizers["year"]):
            totalizers["yearHolding"] = lifetime
            totalizers["year"] = int(time.gmtime(now/1000.0).tm_year)
            self.saveTotalizers(totalizers)
            reset = True
        return (value, reset)

    def totalize(self, val):
        """Totalize today, yesterday, this month, and last month from the lifetime reading."""
        right_now = dt.today()
        month = right_now.month
        day = right_now.day
        # if the stored day is 0, this is a fresh run, so initialize the values now
        if PERSIST['Day'] == 0:
            PERSIST['Day'] = day
            PERSIST['Month'] = month
            PERSIST['Daily Holding'] = val
            PERSIST['Monthly Holding'] = val
            persistence.store(PERSIST, 'totalizers_{}.json'.format(self.mac))
        # communication error during initialization: if lifetime has now reported properly, update the holdings
        if PERSIST['Daily Holding'] is None and val is not None:
            PERSIST['Daily Holding'] = val
            PERSIST['Monthly Holding'] = val

        try:
            if val - PERSIST["Lifetime"] > 0:
                self.totalizing = True
            else:
                self.totalizing = False
        except Exception:
            log.error("Error while checking for totalizing")

        try:
            if val - PERSIST['Daily Holding'] - PERSIST['Todays'] > 500 or time.time() - PERSIST['Last Report'] > 3600 or self.force_send:
                PERSIST['Todays'] = val - PERSIST['Daily Holding']
                PERSIST['Current Months'] = val - PERSIST['Monthly Holding']
                PERSIST['Lifetime'] = val
                self.sendtodbDev(1, 'total_fm_day_gal', PERSIST['Todays'], 0, 'plcfreshwater')
                self.sendtodbDev(1, 'total_fm_day_bbls', PERSIST['Todays']/42, 0, 'plcfreshwater')
                self.sendtodbDev(1, 'total_fm_month_gal', PERSIST['Current Months'], 0, 'plcfreshwater')
                self.sendtodbDev(1, 'total_fm_month_bbls', PERSIST['Current Months']/42, 0, 'plcfreshwater')
                self.sendtodbDev(1, 'total_fm_yesterday_gal', PERSIST['Yesterdays'], 0, 'plcfreshwater')
                self.sendtodbDev(1, 'total_fm_yesterday_bbls', PERSIST['Yesterdays']/42, 0, 'plcfreshwater')
                self.sendtodbDev(1, 'lifetime_flow_meter_gal', PERSIST['Lifetime'], 0, 'plcfreshwater')
                self.sendtodbDev(1, 'lifetime_flow_meter_bbls', PERSIST['Lifetime']/42, 0, 'plcfreshwater')
                if self.force_send:
                    self.sendtodbDev(1, 'total_fm_last_month_gal', PERSIST['Previous Months'], 0, 'plcfreshwater')
                    self.sendtodbDev(1, 'total_fm_last_month_bbls', PERSIST['Previous Months']/42, 0, 'plcfreshwater')
                PERSIST['Last Report'] = time.time()
        except Exception:
            # val was None (comms error); report the stored values on the hourly schedule instead
            if time.time() - PERSIST['Last Report'] > 3600 or self.force_send:
                self.sendtodbDev(1, 'total_fm_day_gal', PERSIST['Todays'], 0, 'plcfreshwater')
                self.sendtodbDev(1, 'total_fm_day_bbls', PERSIST['Todays']/42, 0, 'plcfreshwater')
                self.sendtodbDev(1, 'total_fm_month_gal', PERSIST['Current Months'], 0, 'plcfreshwater')
                self.sendtodbDev(1, 'total_fm_month_bbls', PERSIST['Current Months']/42, 0, 'plcfreshwater')
                self.sendtodbDev(1, 'total_fm_yesterday_gal', PERSIST['Yesterdays'], 0, 'plcfreshwater')
                self.sendtodbDev(1, 'total_fm_yesterday_bbls', PERSIST['Yesterdays']/42, 0, 'plcfreshwater')
                self.sendtodbDev(1, 'lifetime_flow_meter_gal', PERSIST['Lifetime'], 0, 'plcfreshwater')
                self.sendtodbDev(1, 'lifetime_flow_meter_bbls', PERSIST['Lifetime']/42, 0, 'plcfreshwater')
                if self.force_send:
                    self.sendtodbDev(1, 'total_fm_last_month_gal', PERSIST['Previous Months'], 0, 'plcfreshwater')
                    self.sendtodbDev(1, 'total_fm_last_month_bbls', PERSIST['Previous Months']/42, 0, 'plcfreshwater')
                PERSIST['Last Report'] = time.time()

        # if the current day doesn't equal the stored day, roll the dailies over
        if not (day == PERSIST['Day']):
            # if a comms error, use the stored values; else use the latest values
            if val is None:
                PERSIST['Yesterdays'] = PERSIST['Todays']
                PERSIST['Todays'] = 0
                PERSIST['Daily Holding'] = PERSIST['Lifetime']
            else:
                PERSIST['Yesterdays'] = val - PERSIST['Daily Holding']
                PERSIST['Todays'] = 0
                PERSIST['Daily Holding'] = val
                PERSIST['Lifetime'] = val
            PERSIST['Day'] = day
            self.sendtodbDev(1, 'total_fm_day_gal', PERSIST['Todays'], 0, 'plcfreshwater')
            self.sendtodbDev(1, 'total_fm_day_bbls', PERSIST['Todays']/42, 0, 'plcfreshwater')
            self.sendtodbDev(1, 'total_fm_yesterday_gal', PERSIST['Yesterdays'], 0, 'plcfreshwater')
            self.sendtodbDev(1, 'total_fm_yesterday_bbls', PERSIST['Yesterdays']/42, 0, 'plcfreshwater')
            self.sendtodbDev(1, 'lifetime_flow_meter_gal', PERSIST['Lifetime'], 0, 'plcfreshwater')
            self.sendtodbDev(1, 'lifetime_flow_meter_bbls', PERSIST['Lifetime']/42, 0, 'plcfreshwater')
            PERSIST['Last Report'] = time.time()
            # the day has rolled over; check whether the month also rolls over
            if not (month == PERSIST['Month']):
                # if a comms error, use the stored values; else use the latest values
                if val is None:
                    PERSIST['Previous Months'] = PERSIST['Current Months']
                    PERSIST['Current Months'] = 0
                    PERSIST['Monthly Holding'] = PERSIST['Lifetime']
                else:
                    PERSIST['Previous Months'] = val - PERSIST['Monthly Holding']
                    PERSIST['Current Months'] = 0
                    PERSIST['Monthly Holding'] = val
                PERSIST['Month'] = month
                self.sendtodbDev(1, 'total_fm_month_gal', PERSIST['Current Months'], 0, 'plcfreshwater')
                self.sendtodbDev(1, 'total_fm_month_bbls', PERSIST['Current Months']/42, 0, 'plcfreshwater')
                self.sendtodbDev(1, 'total_fm_last_month_gal', PERSIST['Previous Months'], 0, 'plcfreshwater')
                self.sendtodbDev(1, 'total_fm_last_month_bbls', PERSIST['Previous Months']/42, 0, 'plcfreshwater')
                self.sendtodbDev(1, 'lifetime_flow_meter_gal', PERSIST['Lifetime'], 0, 'plcfreshwater')
                self.sendtodbDev(1, 'lifetime_flow_meter_bbls', PERSIST['Lifetime']/42, 0, 'plcfreshwater')
                PERSIST['Last Report'] = time.time()
            persistence.store(PERSIST, 'totalizers_{}.json'.format(self.mac))
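# Illustrative sketch (assumption, not part of the commit): the holding/rollover
# arithmetic used by totalizeDay/totalizeMonth above, in miniature. A "holding" is
# a snapshot of the lifetime counter taken at the period boundary, so a period's
# volume is simply lifetime minus its holding. All numbers are hypothetical.
lifetime = 123469.0       # current meter reading, gallons
day_holding = 123400.0    # lifetime captured at the last midnight rollover
month_holding = 120000.0  # lifetime captured at the last month rollover
print(lifetime - day_holding)    # today's volume: 69.0 gal
print(lifetime - month_holding)  # this month's volume: 3469.0 gal
# On rollover the driver sets the holding to the current lifetime, so the period
# volume restarts at zero without ever writing to the PLC counter itself.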
@@ -0,0 +1,55 @@
from pycomm.ab_comm.clx import Driver as ClxDriver
from pycomm.cip.cip_base import CommError, DataError
import time
import sys

TAG_DATAERROR_SLEEPTIME = 5


def read_tag(addr, tag, plc_type="CLX"):
    """Read a tag from the PLC."""
    direct = plc_type == "Micro800"
    clx = ClxDriver()
    try:
        if clx.open(addr, direct_connection=direct):
            try:
                val = clx.read_tag(tag)
                clx.close()
                return val
            except DataError as err:
                clx.close()
                time.sleep(TAG_DATAERROR_SLEEPTIME)
                print("Data Error during readTag({}, {}): {}".format(addr, tag, err))
    except CommError:
        # err = c.get_status()
        # clx.close()
        print("Could not connect during readTag({}, {})".format(addr, tag))
    except AttributeError as err:
        clx.close()
        print("AttributeError during readTag({}, {}): \n{}".format(addr, tag, err))
    clx.close()
    return False


def write_tag(addr, tag, val, plc_type="CLX"):
    """Write a tag value to the PLC."""
    direct = plc_type == "Micro800"
    clx = ClxDriver()
    try:
        if clx.open(addr, direct_connection=direct):
            try:
                initial_val = clx.read_tag(tag)
                write_status = clx.write_tag(tag, val, initial_val[1])
                clx.close()
                return write_status
            except DataError as err:
                clx_err = clx.get_status()
                clx.close()
                print("--\nDataError during writeTag({}, {}, {}, plc_type={}) -- {}\n{}\n".format(addr, tag, val, plc_type, err, clx_err))
    except CommError as err:
        clx_err = clx.get_status()
        print("--\nCommError during write_tag({}, {}, {}, plc_type={})\n{}\n--".format(addr, tag, val, plc_type, err))
        # clx.close()
    return False


resp = write_tag(sys.argv[1], "Lifetime_Flow_Meter_Gal", float(sys.argv[2]), "Micro800")
if resp:
    print(read_tag(sys.argv[1], "Lifetime_Flow_Meter_Gal", "Micro800"))
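# Usage sketch (assumption): the standalone block above makes this module double
# as a command-line tool for rebasing the PLC's lifetime totalizer, e.g.
#   python <script>.py 192.168.1.201 123456.0
# argv[1] is the Micro800's IP address and argv[2] the new Lifetime_Flow_Meter_Gal
# value; the tag is written and then read back to confirm. The diff above omits
# the script's filename, so none is assumed here.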
64
meshifyDrivers/plcfreshwater/utilities.py
Normal file
@@ -0,0 +1,64 @@
"""Utility functions for the driver."""
|
||||
import socket
|
||||
import struct
|
||||
import urllib
|
||||
import contextlib
|
||||
|
||||
def get_private_ip_address():
|
||||
"""Find the private IP Address of the host device."""
|
||||
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
|
||||
try:
|
||||
sock.connect(("8.8.8.8", 80))
|
||||
except Exception as e:
|
||||
return e
|
||||
ip_address = sock.getsockname()[0]
|
||||
sock.close()
|
||||
return ip_address
|
||||
|
||||
def get_public_ip_address():
|
||||
ip_address = "0.0.0.0"
|
||||
try:
|
||||
with contextlib.closing(urllib.urlopen("http://checkip.amazonaws.com")) as url:
|
||||
ip_address = url.read()
|
||||
except Exception as e:
|
||||
print("could not resolve check IP: {}".format(e))
|
||||
return ip_address
|
||||
return ip_address[:-1]
|
||||
|
||||
|
||||
|
||||
def int_to_float16(int_to_convert):
|
||||
"""Convert integer into float16 representation."""
|
||||
bin_rep = ('0' * 16 + '{0:b}'.format(int_to_convert))[-16:]
|
||||
sign = 1.0
|
||||
if int(bin_rep[0]) == 1:
|
||||
sign = -1.0
|
||||
exponent = float(int(bin_rep[1:6], 2))
|
||||
fraction = float(int(bin_rep[6:17], 2))
|
||||
|
||||
if exponent == float(0b00000):
|
||||
return sign * 2 ** -14 * fraction / (2.0 ** 10.0)
|
||||
elif exponent == float(0b11111):
|
||||
if fraction == 0:
|
||||
return sign * float("inf")
|
||||
return float("NaN")
|
||||
frac_part = 1.0 + fraction / (2.0 ** 10.0)
|
||||
return sign * (2 ** (exponent - 15)) * frac_part
|
||||
|
||||
|
||||
def ints_to_float(int1, int2):
|
||||
"""Convert 2 registers into a floating point number."""
|
||||
mypack = struct.pack('>HH', int1, int2)
|
||||
f_unpacked = struct.unpack('>f', mypack)
|
||||
print("[{}, {}] >> {}".format(int1, int2, f_unpacked[0]))
|
||||
return f_unpacked[0]
|
||||
|
||||
|
||||
def degf_to_degc(temp_f):
|
||||
"""Convert deg F to deg C."""
|
||||
return (temp_f - 32.0) * (5.0/9.0)
|
||||
|
||||
|
||||
def degc_to_degf(temp_c):
|
||||
"""Convert deg C to deg F."""
|
||||
return temp_c * 1.8 + 32.0
|
||||
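# Worked example (illustrative) for int_to_float16 above: decoding 0x3C00.
# 0x3C00 -> bits 0 01111 0000000000 -> sign +, exponent 15, fraction 0,
# so the value is (+1) * 2**(15-15) * (1 + 0/1024) = 1.0.
print(int_to_float16(0x3C00))  # 1.0
print(int_to_float16(0xC000))  # bits 1 10000 0000000000 -> -1 * 2**1 * 1.0 = -2.0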
291
meshifyDrivers/transferlite/Channel.py
Normal file
@@ -0,0 +1,291 @@
"""Define Meshify channel class."""
|
||||
from pycomm.ab_comm.clx import Driver as ClxDriver
|
||||
from pycomm.cip.cip_base import CommError, DataError
|
||||
import time
|
||||
|
||||
|
||||
def binarray(intval):
|
||||
"""Split an integer into its bits."""
|
||||
bin_string = '{0:08b}'.format(intval)
|
||||
bin_arr = [i for i in bin_string]
|
||||
bin_arr.reverse()
|
||||
return bin_arr
|
||||
|
||||
|
||||
def read_tag(addr, tag, plc_type="CLX"):
|
||||
"""Read a tag from the PLC."""
|
||||
direct = plc_type == "Micro800"
|
||||
c = ClxDriver()
|
||||
try:
|
||||
if c.open(addr, direct_connection=direct):
|
||||
try:
|
||||
v = c.read_tag(tag)
|
||||
return v
|
||||
except DataError as e:
|
||||
c.close()
|
||||
print("Data Error during readTag({}, {}): {}".format(addr, tag, e))
|
||||
except CommError:
|
||||
# err = c.get_status()
|
||||
c.close()
|
||||
print("Could not connect during readTag({}, {})".format(addr, tag))
|
||||
# print err
|
||||
except AttributeError as e:
|
||||
c.close()
|
||||
print("AttributeError during readTag({}, {}): \n{}".format(addr, tag, e))
|
||||
c.close()
|
||||
return False
|
||||
|
||||
|
||||
def read_array(addr, tag, start, end, plc_type="CLX"):
    """Read an array from the PLC."""
    direct = plc_type == "Micro800"
    c = ClxDriver()
    if c.open(addr, direct_connection=direct):
        arr_vals = []
        try:
            for i in range(start, end):
                tag_w_index = tag + "[{}]".format(i)
                v = c.read_tag(tag_w_index)
                # print('{} - {}'.format(tag_w_index, v))
                arr_vals.append(round(v[0], 4))
                # print(v)
            if len(arr_vals) > 0:
                return arr_vals
            else:
                print("No length for {}".format(addr))
                return False
        except Exception:
            print("Error during readArray({}, {}, {}, {})".format(addr, tag, start, end))
            err = c.get_status()
            c.close()
            print(err)
    c.close()


def write_tag(addr, tag, val, plc_type="CLX"):
    """Write a tag value to the PLC."""
    direct = plc_type == "Micro800"
    c = ClxDriver()
    try:
        if c.open(addr, direct_connection=direct):
            try:
                cv = c.read_tag(tag)
                print(cv)
                wt = c.write_tag(tag, val, cv[1])
                return wt
            except Exception:
                print("Error during writeTag({}, {}, {})".format(addr, tag, val))
                err = c.get_status()
                c.close()
                print(err)
        c.close()
    except CommError:
        # err = c.get_status()
        c.close()
        print("Could not connect during writeTag({}, {}, {})".format(addr, tag, val))


class Channel(object):
    """Holds the configuration for a Meshify channel."""

    def __init__(self, mesh_name, data_type, chg_threshold, guarantee_sec, map_=False, write_enabled=False):
        """Initialize the channel."""
        self.mesh_name = mesh_name
        self.data_type = data_type
        self.last_value = None
        self.value = None
        self.last_send_time = 0
        self.chg_threshold = chg_threshold
        self.guarantee_sec = guarantee_sec
        self.map_ = map_
        self.write_enabled = write_enabled

    def __str__(self):
        """Create a string for the channel."""
        return "{}\nvalue: {}, last_send_time: {}".format(self.mesh_name, self.value, self.last_send_time)

    def check(self, new_value, force_send=False):
        """Check to see if the new_value needs to be stored."""
        send_needed = False
        send_reason = ""
        if self.data_type == 'BOOL' or self.data_type == 'STRING':
            # discrete channels send on any change of value
            if self.last_send_time == 0:
                send_needed = True
                send_reason = "no send time"
            elif self.value is None:
                send_needed = True
                send_reason = "no value"
            elif not (self.value == new_value):
                if self.map_:
                    if not self.value == self.map_[new_value]:
                        send_needed = True
                        send_reason = "value change"
                else:
                    send_needed = True
                    send_reason = "value change"
            elif (time.time() - self.last_send_time) > self.guarantee_sec:
                send_needed = True
                send_reason = "guarantee sec"
            elif force_send:
                send_needed = True
                send_reason = "forced"
        else:
            # analog channels send on a change beyond the deadband
            if self.last_send_time == 0:
                send_needed = True
                send_reason = "no send time"
            elif self.value is None:
                send_needed = True
                send_reason = "no value"
            elif abs(self.value - new_value) > self.chg_threshold:
                send_needed = True
                send_reason = "change threshold"
            elif (time.time() - self.last_send_time) > self.guarantee_sec:
                send_needed = True
                send_reason = "guarantee sec"
            elif force_send:
                send_needed = True
                send_reason = "forced"
        if send_needed:
            self.last_value = self.value
            if self.map_:
                try:
                    self.value = self.map_[new_value]
                except KeyError:
                    print("Cannot find a map value for {} in {} for {}".format(new_value, self.map_, self.mesh_name))
                    self.value = new_value
            else:
                self.value = new_value
            self.last_send_time = time.time()
            print("Sending {} for {} - {}".format(self.value, self.mesh_name, send_reason))
        return send_needed

    def read(self):
        """Read the value."""
        pass


def identity(sent):
    """Return exactly what was sent to it."""
    return sent


class ModbusChannel(Channel):
    """Modbus channel object."""

    def __init__(self, mesh_name, register_number, data_type, chg_threshold, guarantee_sec, channel_size=1, map_=False, write_enabled=False, transformFn=identity):
        """Initialize the channel (the base class sets the shared attributes)."""
        super(ModbusChannel, self).__init__(mesh_name, data_type, chg_threshold, guarantee_sec, map_, write_enabled)
        self.register_number = register_number
        self.channel_size = channel_size
        self.transformFn = transformFn

    def read(self, mbsvalue):
        """Return the transformed read value."""
        return self.transformFn(mbsvalue)


class PLCChannel(Channel):
    """PLC Channel Object."""

    def __init__(self, ip, mesh_name, plc_tag, data_type, chg_threshold, guarantee_sec, map_=False, write_enabled=False, plc_type='CLX'):
        """Initialize the channel (the base class sets the shared attributes)."""
        super(PLCChannel, self).__init__(mesh_name, data_type, chg_threshold, guarantee_sec, map_, write_enabled)
        self.plc_ip = ip
        self.plc_tag = plc_tag
        self.plc_type = plc_type

    def read(self):
        """Read the value."""
        plc_value = None
        if self.plc_tag and self.plc_ip:
            read_value = read_tag(self.plc_ip, self.plc_tag, plc_type=self.plc_type)
            if read_value:
                plc_value = read_value[0]

        return plc_value


class BoolArrayChannels(Channel):
    """Hold the configuration for a set of boolean array channels."""

    def __init__(self, ip, mesh_name, plc_tag, data_type, chg_threshold, guarantee_sec, map_=False, write_enabled=False):
        """Initialize the channel."""
        self.plc_ip = ip
        self.mesh_name = mesh_name
        self.plc_tag = plc_tag
        self.data_type = data_type
        self.last_value = None
        self.value = None
        self.last_send_time = 0
        self.chg_threshold = chg_threshold
        self.guarantee_sec = guarantee_sec
        self.map_ = map_
        self.write_enabled = write_enabled

    def compare_values(self, new_val_dict):
        """Compare new values to old values to see if the values need storing."""
        send = False
        for idx in new_val_dict:
            try:
                if new_val_dict[idx] != self.last_value[idx]:
                    send = True
            except KeyError:
                print("Key Error in self.compare_values for index {}".format(idx))
                send = True
        return send

    def read(self, force_send=False):
        """Read the value and check to see if it needs to be stored."""
        send_needed = False
        send_reason = ""
        if self.plc_tag:
            v = read_tag(self.plc_ip, self.plc_tag)
            if v:
                bool_arr = binarray(v[0])
                new_val = {}
                for idx in self.map_:
                    try:
                        new_val[self.map_[idx]] = bool_arr[idx]
                    except KeyError:
                        print("Not able to get value for index {}".format(idx))

                if self.last_send_time == 0:
                    send_needed = True
                    send_reason = "no send time"
                elif self.value is None:
                    send_needed = True
                    send_reason = "no value"
                elif self.compare_values(new_val):
                    send_needed = True
                    send_reason = "value change"
                elif (time.time() - self.last_send_time) > self.guarantee_sec:
                    send_needed = True
                    send_reason = "guarantee sec"
                elif force_send:
                    send_needed = True
                    send_reason = "forced"

                if send_needed:
                    # keep the previous reading before overwriting, so compare_values
                    # has something to diff against on the next pass
                    self.last_value = self.value
                    self.value = new_val
                    self.last_send_time = time.time()
                    print("Sending {} for {} - {}".format(self.value, self.mesh_name, send_reason))
        return send_needed
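# Minimal usage sketch (assumption, not from the commit): the deadband plus
# guaranteed-send logic in Channel.check for an analog channel. Values are
# hypothetical.
level = Channel("tank_level", "REAL", chg_threshold=1.0, guarantee_sec=600)
print(level.check(50.0))  # True  - "no send time": the first sample always sends
print(level.check(50.4))  # False - within the 1.0 deadband, guarantee not yet due
print(level.check(52.0))  # True  - "change threshold": moved more than 1.0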
12
meshifyDrivers/transferlite/config.txt
Normal file
@@ -0,0 +1,12 @@
{
    "files": {
        "file3": "channel.py",
        "file2": "utilities.py",
        "file1": "transferlite.py",
        "file4": "file_logger.py"
    },
    "deviceName": "transferlite",
    "driverId": "0230",
    "releaseVersion": "4",
    "driverFileName": "transferlite.py"
}
18
meshifyDrivers/transferlite/file_logger.py
Normal file
@@ -0,0 +1,18 @@
"""Logging setup for {{cookiecutter.driver_name}}"""
|
||||
import logging
|
||||
from logging.handlers import RotatingFileHandler
|
||||
import sys
|
||||
|
||||
log_formatter = logging.Formatter('%(asctime)s %(levelname)s %(funcName)s(%(lineno)d) %(message)s')
|
||||
log_file = './transferlite.log'
|
||||
my_handler = RotatingFileHandler(log_file, mode='a', maxBytes=500*1024,
|
||||
backupCount=2, encoding=None, delay=0)
|
||||
my_handler.setFormatter(log_formatter)
|
||||
my_handler.setLevel(logging.INFO)
|
||||
filelogger = logging.getLogger('transferlite')
|
||||
filelogger.setLevel(logging.INFO)
|
||||
filelogger.addHandler(my_handler)
|
||||
|
||||
console_out = logging.StreamHandler(sys.stdout)
|
||||
console_out.setFormatter(log_formatter)
|
||||
filelogger.addHandler(console_out)
|
||||
21
meshifyDrivers/transferlite/persistence.py
Normal file
@@ -0,0 +1,21 @@
"""Data persistance functions."""
|
||||
# if more advanced persistence is needed, use a sqlite database
|
||||
import json
|
||||
|
||||
|
||||
def load(filename="persist.json"):
|
||||
"""Load persisted settings from the specified file."""
|
||||
try:
|
||||
with open(filename, 'r') as persist_file:
|
||||
return json.load(persist_file)
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
|
||||
def store(persist_obj, filename="persist.json"):
|
||||
"""Store the persisting settings into the specified file."""
|
||||
try:
|
||||
with open(filename, 'w') as persist_file:
|
||||
return json.dump(persist_obj, persist_file, indent=4)
|
||||
except Exception:
|
||||
return False
|
||||
188
meshifyDrivers/transferlite/transferlite.py
Normal file
@@ -0,0 +1,188 @@
"""Driver for transferlite"""
|
||||
|
||||
import threading
|
||||
import sys
|
||||
import json
|
||||
import time
|
||||
import logging
|
||||
from random import randint
|
||||
from device_base import deviceBase
|
||||
from channel import PLCChannel, read_tag, write_tag,TAG_DATAERROR_SLEEPTIME
|
||||
from utilities import get_public_ip_address
|
||||
from file_logger import filelogger as log
|
||||
|
||||
|
||||
_ = None
|
||||
log.info("transferlite startup")
|
||||
|
||||
TRUE_FALSE = {
|
||||
0: "false",
|
||||
1: "true"
|
||||
}
|
||||
|
||||
AUTO_MANUAL = {
|
||||
0: "Auto",
|
||||
1: "Manual"
|
||||
}
|
||||
|
||||
PHASE_STATES = {
|
||||
1: "Running",
|
||||
2: "Holding",
|
||||
4: "Restarting",
|
||||
8: "Stopping",
|
||||
16: "Aborting",
|
||||
32: "Resetting",
|
||||
64: "Idle",
|
||||
128: "Held",
|
||||
256: "Complete",
|
||||
512: "Stopped",
|
||||
1024: "Aborted"
|
||||
}
|
||||
|
||||
# GLOBAL VARIABLES
|
||||
WAIT_FOR_CONNECTION_SECONDS = 60
|
||||
IP_CHECK_PERIOD = 60
|
||||
WATCHDOG_ENABLE = True
|
||||
WATCHDOG_CHECK_PERIOD = 60
|
||||
WATCHDOG_SEND_PERIOD = 3600 # Seconds, the longest amount of time before sending the watchdog status
|
||||
PLC_IP_ADDRESS = "192.168.1.10"
|
||||
CHANNELS = [
|
||||
PLCChannel(PLC_IP_ADDRESS, "ft01_flowmeter_bpd", "FT01_Flowmeter_BPD", "REAL", 100.0, 600),
|
||||
PLCChannel(PLC_IP_ADDRESS, "auto_manual", "sts_autoMode", "STRING", 1, 600, map_=AUTO_MANUAL),
|
||||
PLCChannel(PLC_IP_ADDRESS, "ft01_flowmeter_gpm", "FT01_Flowmeter.val", "REAL", 10.0, 600),
|
||||
PLCChannel(PLC_IP_ADDRESS, "lt11_pondlevel", "LT11_PondLevel.val", "REAL", 1.0, 600),
|
||||
PLCChannel(PLC_IP_ADDRESS, "lt21_pondlevel", "LT21_PondLevel.val", "REAL", 1.0, 600),
|
||||
PLCChannel(PLC_IP_ADDRESS, "system1_hasleveltransmitter", "cfg_System1.hasLevelTransmitter", "STRING", 1.0, 600, map_=TRUE_FALSE),
|
||||
PLCChannel(PLC_IP_ADDRESS, "system2_hasleveltransmitter", "cfg_System2.hasLevelTransmitter", "STRING", 1.0, 600, map_=TRUE_FALSE),
|
||||
PLCChannel(PLC_IP_ADDRESS, "pt11_dischargepressure", "PT11_DischargePressure.val", "REAL", 10, 600),
|
||||
PLCChannel(PLC_IP_ADDRESS, "pt21_dischargepressure", "PT21_DischargePressure.val", "REAL", 10, 600),
|
||||
PLCChannel(PLC_IP_ADDRESS, "flow_rate_setpoint", "set_FlowRateSetpoint", "REAL", 1.0, 600),
|
||||
PLCChannel(PLC_IP_ADDRESS, "system1_frequency_setpoint", "set_ManualFrequencySP_System1", "REAL", 1.0, 600),
|
||||
PLCChannel(PLC_IP_ADDRESS, "system2_frequency_setpoint", "set_ManualFrequencySP_System2", "REAL", 1.0, 600),
|
||||
PLCChannel(PLC_IP_ADDRESS, "ft01_flowmeter_bpd_yesterday", "FT01_Flowmeter_History[1]", "REAL", 1.0, 600),
|
||||
PLCChannel(PLC_IP_ADDRESS, "ft01_flowmeter_bpd_today", "FT01_Flowmeter_History[0]", "REAL",100.0, 600),
|
||||
PLCChannel(PLC_IP_ADDRESS, "mc11_motorfrequency", "MC11_Pump.status.speedFeedback", "REAL", 5.0, 600),
|
||||
PLCChannel(PLC_IP_ADDRESS, "mc21_motorfrequency", "MC21_Pump.status.speedFeedback", "REAL", 5.0, 600),
|
||||
PLCChannel(PLC_IP_ADDRESS, "state_supervisor", "Supervisor.State", "STRING", 1.0, 600, map_=PHASE_STATES),
|
||||
PLCChannel(PLC_IP_ADDRESS, "state_system1", "System1.State", "STRING", 1.0, 600, map_=PHASE_STATES),
|
||||
PLCChannel(PLC_IP_ADDRESS, "state_system2", "System2.State", "STRING", 1.0, 600, map_=PHASE_STATES)
|
||||
]
|
||||
|
||||
|
||||
class start(threading.Thread, deviceBase):
    """Start class required by Meshify."""

    def __init__(self, name=None, number=None, mac=None, Q=None, mcu=None,
                 companyId=None, offset=None, mqtt=None, Nodes=None):
        """Initialize the driver."""
        threading.Thread.__init__(self)
        deviceBase.__init__(self, name=name, number=number, mac=mac, Q=Q,
                            mcu=mcu, companyId=companyId, offset=offset,
                            mqtt=mqtt, Nodes=Nodes)

        self.daemon = True
        self.version = "4"
        self.finished = threading.Event()
        self.force_send = False
        self.public_ip_address = ""
        self.public_ip_address_last_checked = 0
        self.watchdog = False
        self.watchdog_last_checked = 0
        self.watchdog_last_sent = 0
        threading.Thread.start(self)

    # this is a required function for all drivers; its goal is to upload some piece of data
    # about your device so it can be seen on the web
    def register(self):
        """Register the driver."""
        # self.sendtodbDev("log", "BOOM! Booted.", 0)
        pass

    def run(self):
        """Actually run the driver."""
        for i in range(0, WAIT_FOR_CONNECTION_SECONDS):
            print("transferlite driver will start in {} seconds".format(WAIT_FOR_CONNECTION_SECONDS - i))
            time.sleep(1)
        log.info("BOOM! Starting transferlite driver...")

        self._check_ip_address()
        self._check_watchdog()

        self.nodes["transferlite_0199"] = self

        send_loops = 0

        while True:
            now = time.time()
            if self.force_send:
                log.warning("FORCE SEND: TRUE")

            for chan in CHANNELS:
                read_val = chan.read()
                if chan.check(read_val, self.force_send):
                    self.sendtodbDev(1, chan.mesh_name, chan.value, 0, 'transferlite')
                time.sleep(TAG_DATAERROR_SLEEPTIME)

            # print("transferlite driver still alive...")
            if self.force_send:
                if send_loops > 2:
                    log.warning("Turning off force_send")
                    self.force_send = False
                    send_loops = 0
                else:
                    send_loops += 1

            if WATCHDOG_ENABLE:
                if (now - self.watchdog_last_checked) > WATCHDOG_CHECK_PERIOD:
                    self._check_watchdog()

            if (now - self.public_ip_address_last_checked) > IP_CHECK_PERIOD:
                self._check_ip_address()

    def _check_watchdog(self):
        """Check the watchdog and send to Meshify if changed or stale."""
        test_watchdog = self.transferlite_watchdog()
        now = time.time()
        self.watchdog_last_checked = now
        if test_watchdog != self.watchdog or (now - self.watchdog_last_sent) > WATCHDOG_SEND_PERIOD:
            self.sendtodbDev(1, 'watchdog', test_watchdog, 0, 'transferlite')
            self.watchdog = test_watchdog
            self.watchdog_last_sent = now

    def _check_ip_address(self):
        """Check the public IP address and send to Meshify if changed."""
        self.public_ip_address_last_checked = time.time()
        test_public_ip = get_public_ip_address()
        if not test_public_ip == self.public_ip_address:
            self.sendtodbDev(1, 'public_ip_address', test_public_ip, 0, 'transferlite')
            self.public_ip_address = test_public_ip

    def transferlite_watchdog(self):
        """Write a random integer to the PLC, then check one second later that it has been decremented by 1."""
        randval = randint(0, 32767)
        write_tag(str(PLC_IP_ADDRESS), 'watchdog_INT', randval)
        time.sleep(1)
        watchdog_val = read_tag(str(PLC_IP_ADDRESS), 'watchdog_INT')
        try:
            return (randval - 1) == watchdog_val[0]
        except (KeyError, TypeError):
            return False

    def transferlite_sync(self, name, value):
        """Sync all data from the driver."""
        self.force_send = True
        # self.sendtodb("log", "synced", 0)
        return True

    def transferlite_writeplctag(self, name, value):
        """Write a value to the PLC."""
        new_val = json.loads(str(value).replace("'", '"'))
        tag_n = str(new_val['tag'])  # "cmd_Start"
        val_n = new_val['val']
        w = write_tag(str(PLC_IP_ADDRESS), tag_n, val_n)
        log.info("Result of transferlite_writeplctag(self, {}, {}) = {}".format(name, value, w))
        if w is None:
            w = "Error writing to PLC..."
        return w
69
meshifyDrivers/transferlite/utilities.py
Normal file
@@ -0,0 +1,69 @@
"""Utility functions for the driver."""
|
||||
import socket
|
||||
import struct
|
||||
import time
|
||||
|
||||
|
||||
def get_public_ip_address():
|
||||
"""Find the public IP Address of the host device."""
|
||||
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
|
||||
sock.connect(("8.8.8.8", 80))
|
||||
ip_address = sock.getsockname()[0]
|
||||
sock.close()
|
||||
return ip_address
|
||||
|
||||
|
||||
def int_to_float16(int_to_convert):
|
||||
"""Convert integer into float16 representation."""
|
||||
bin_rep = ('0' * 16 + '{0:b}'.format(int_to_convert))[-16:]
|
||||
sign = 1.0
|
||||
if int(bin_rep[0]) == 1:
|
||||
sign = -1.0
|
||||
exponent = float(int(bin_rep[1:6], 2))
|
||||
fraction = float(int(bin_rep[6:17], 2))
|
||||
|
||||
if exponent == float(0b00000):
|
||||
return sign * 2 ** -14 * fraction / (2.0 ** 10.0)
|
||||
elif exponent == float(0b11111):
|
||||
if fraction == 0:
|
||||
return sign * float("inf")
|
||||
return float("NaN")
|
||||
frac_part = 1.0 + fraction / (2.0 ** 10.0)
|
||||
return sign * (2 ** (exponent - 15)) * frac_part
|
||||
|
||||
|
||||
def ints_to_float(int1, int2):
|
||||
"""Convert 2 registers into a floating point number."""
|
||||
mypack = struct.pack('>HH', int1, int2)
|
||||
f_unpacked = struct.unpack('>f', mypack)
|
||||
print("[{}, {}] >> {}".format(int1, int2, f_unpacked[0]))
|
||||
return f_unpacked[0]
|
||||
|
||||
|
||||
def degf_to_degc(temp_f):
|
||||
"""Convert deg F to deg C."""
|
||||
return (temp_f - 32.0) * (5.0/9.0)
|
||||
|
||||
|
||||
def degc_to_degf(temp_c):
|
||||
"""Convert deg C to deg F."""
|
||||
return temp_c * 1.8 + 32.0
|
||||
|
||||
|
||||
class Every(object):
|
||||
"""Class that runs a specific method every so often."""
|
||||
|
||||
def __init__(self, func, seconds, run_at_start=True):
|
||||
self.func = func
|
||||
self.seconds = seconds
|
||||
self.last_run = time.time()
|
||||
if run_at_start:
|
||||
self.check(force=True)
|
||||
|
||||
def check(self, force=False):
|
||||
"""Check to see if the function needs to be run."""
|
||||
now = time.time()
|
||||
if ((now - self.last_run) > self.seconds) or force:
|
||||
self.last_run = now
|
||||
return self.func()
|
||||
return None
|
||||
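# Minimal usage sketch (assumption, not from the commit): Every wraps a callable
# with a rate limit. Construction fires once (run_at_start=True); afterwards
# check() only invokes the function when the interval has elapsed.
def heartbeat():
    print("heartbeat")

every_minute = Every(heartbeat, 60)
for _ in range(3):
    every_minute.check()  # returns None until 60 seconds have passed
    time.sleep(1)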
109
migration/plcfreshwater.ipynb
Normal file
@@ -0,0 +1,109 @@
{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "{'monthHolding': 12345,\n",
       " 'dayHolding': 12367,\n",
       " 'lifetime': 123469,\n",
       " 'day': 14,\n",
       " 'month': 3,\n",
       " 'Totalizers': False,\n",
       " 'year': 2024}"
      ]
     },
     "execution_count": 7,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "import json, time\n",
    "\n",
    "\n",
    "# Original JSON object\n",
    "original_json = {\n",
    "    'Todays': 43,\n",
    "    'Yesterdays': 42,\n",
    "    'Current Months': 5555,\n",
    "    'Previous Months': 5552,\n",
    "    'Monthly Holding': 12345,\n",
    "    'Daily Holding': 12367,\n",
    "    'Lifetime': 123469,\n",
    "    'Day': 14,\n",
    "    'Month': 3,\n",
    "    'Last Report': 12343432324324,\n",
    "    'Totalizers': False\n",
    "}\n",
    "\"\"\"\n",
    "{\n",
    "    \"day\": 0,\n",
    "    \"dayHolding\": 0,\n",
    "    \"lifetime\": 0,\n",
    "    \"month\": 0,\n",
    "    \"monthHolding\": 0,\n",
    "    \"week\": 0,\n",
    "    \"weekHolding\": 0,\n",
    "    \"year\": 0,\n",
    "    \"yearHolding\": 0\n",
    "}\n",
    "\"\"\"\n",
    "\n",
    "\n",
    "def convertPersist(original_json):\n",
    "    # Mapping of keys\n",
    "    key_mapping = {\n",
    "        'Todays': None,\n",
    "        'Yesterdays': None,\n",
    "        'Current Months': None,\n",
    "        'Previous Months': None,\n",
    "        'Monthly Holding': 'monthHolding',\n",
    "        'Daily Holding': 'dayHolding',\n",
    "        'Lifetime': 'lifetime',\n",
    "        'Day': 'day',\n",
    "        'Month': 'month',\n",
    "        'Last Report': None,\n",
    "        'Totalizers': 'Totalizers'\n",
    "    }\n",
    "    now = round(time.time()/600)*600*1000\n",
    "    # Convert keys and build new JSON object\n",
    "    new_json = {}\n",
    "    for original_key, new_key in key_mapping.items():\n",
    "        if new_key:\n",
    "            new_json[new_key] = original_json[original_key]\n",
    "    new_json[\"year\"] = time.gmtime(now/1000.0).tm_year\n",
    "    # Convert to JSON format\n",
    "    #new_json_str = json.dumps(new_json, indent=4, sort_keys=True)\n",
    "    return(new_json)\n",
    "convertPersist(original_json=original_json)\n"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "env-01",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.10.5"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
@@ -2,7 +2,7 @@
 "cells": [
 {
 "cell_type": "code",
-"execution_count": 2,
+"execution_count": 1,
 "metadata": {},
 "outputs": [],
 "source": [
@@ -16,7 +16,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 3,
+"execution_count": 2,
 "metadata": {},
 "outputs": [],
 "source": [
@@ -27,7 +27,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 4,
+"execution_count": 3,
 "metadata": {},
 "outputs": [],
 "source": [
@@ -40,7 +40,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 5,
+"execution_count": 4,
 "metadata": {},
 "outputs": [],
 "source": [
@@ -54,7 +54,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 6,
+"execution_count": 5,
 "metadata": {},
 "outputs": [],
 "source": [
@@ -74,7 +74,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 7,
+"execution_count": 6,
 "metadata": {},
 "outputs": [],
 "source": [
@@ -85,7 +85,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 8,
+"execution_count": 7,
 "metadata": {},
 "outputs": [],
 "source": [
@@ -102,7 +102,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 9,
+"execution_count": 8,
 "metadata": {},
 "outputs": [],
 "source": [
@@ -128,7 +128,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 10,
+"execution_count": 9,
 "metadata": {},
 "outputs": [],
 "source": [
@@ -138,7 +138,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 11,
+"execution_count": 10,
 "metadata": {},
 "outputs": [],
 "source": [
@@ -167,7 +167,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 16,
+"execution_count": 11,
 "metadata": {},
 "outputs": [],
 "source": [
@@ -186,17 +186,13 @@
 },
 {
 "cell_type": "code",
-"execution_count": 17,
+"execution_count": 12,
 "metadata": {},
-"outputs": [
-{
-"name": "stderr",
-"output_type": "stream",
-"text": [
-"/Users/nico/miniforge3/envs/thingsboard/lib/python3.10/site-packages/xlsxwriter/worksheet.py:3261: UserWarning: Must have at least one data row in in add_table()\n",
-" warn(\"Must have at least one data row in in add_table()\")\n",
-"/Users/nico/miniforge3/envs/thingsboard/lib/python3.10/site-packages/xlsxwriter/worksheet.py:3261: UserWarning: Must have at least one data row in in add_table()\n",
-" warn(\"Must have at least one data row in in add_table()\")\n",
-"/Users/nico/miniforge3/envs/thingsboard/lib/python3.10/site-packages/xlsxwriter/worksheet.py:3261: UserWarning: Must have at least one data row in in add_table()\n",
-" warn(\"Must have at least one data row in in add_table()\")\n",
-"/Users/nico/miniforge3/envs/thingsboard/lib/python3.10/site-packages/xlsxwriter/worksheet.py:3261: UserWarning: Must have at least one data row in in add_table()\n",