updated code for chunking and reorganized

This commit is contained in:
Nico Melone
2023-12-14 13:16:36 -06:00
parent a75d279bf6
commit 14c29b6718
52 changed files with 7044 additions and 1824 deletions

BIN
Pub_Sub/.DS_Store vendored

Binary file not shown.

View File

@@ -290,10 +290,6 @@
{
"key": "MAC",
"value": "00:18:05:1e:94:f4"
},
{
"key": "MAC_UPPER",
"value": "00:18:05:1A:E5:57"
}
],
"quickfaas": {

File diff suppressed because one or more lines are too long

View File

@@ -289,10 +289,6 @@
{
"key": "MAC",
"value": "00:18:05:1e:95:14"
},
{
"key": "MAC_UPPER",
"value": "00:18:05:1A:E5:57"
}
],
"quickfaas": {

File diff suppressed because one or more lines are too long

View File

@@ -26,10 +26,10 @@ def sync():
for measure in controller["measures"]:
#publish measure
if measure["name"] in ["wellstatus","pidcontrolmode","downholesensorstatus","alarmflowrate","alarmintakepressure","alarmintaketemperature","alarmtubingpressure","alarmvfd","alarmlockout","alarmfluidlevel","runpermissive","startpermissive","last_vfd_fault_code","vfd_fault", "flowmeter_fault"]:
payload[measure["name"]] = convert_int(measure["name"], measure["value"])
payload[measure["name"]+ "_int"] = measure["value"]
payload["values"][measure["name"]] = convert_int(measure["name"], measure["value"])
payload["values"][measure["name"]+ "_int"] = measure["value"]
else:
payload[measure["name"]] = measure["value"]
payload["values"][measure["name"]] = measure["value"]
logger.debug("Sending on topic: {}".format(topic))
logger.debug("Sending value: {}".format(payload))
for chunk in chunk_payload(payload=payload):

File diff suppressed because one or more lines are too long

View File

@@ -26,10 +26,10 @@ def sync():
for measure in controller["measures"]:
#publish measure
if measure["name"] in ["wellstatus","pidcontrolmode","downholesensorstatus","alarmflowrate","alarmintakepressure","alarmintaketemperature","alarmtubingpressure","alarmvfd","alarmlockout","alarmfluidlevel","runpermissive","startpermissive","last_vfd_fault_code","vfd_fault", "flowmeter_fault"]:
payload[measure["name"]] = convert_int(measure["name"], measure["value"])
payload[measure["name"]+ "_int"] = measure["value"]
payload["values"][measure["name"]] = convert_int(measure["name"], measure["value"])
payload["values"][measure["name"]+ "_int"] = measure["value"]
else:
payload[measure["name"]] = measure["value"]
payload["values"][measure["name"]] = measure["value"]
logger.debug("Sending on topic: {}".format(topic))
logger.debug("Sending value: {}".format(payload))
for chunk in chunk_payload(payload=payload):

File diff suppressed because one or more lines are too long

View File

@@ -26,10 +26,10 @@ def sync():
for measure in controller["measures"]:
#publish measure
if measure["name"] in ["wellstatus","pidcontrolmode","downholesensorstatus","alarmflowrate","alarmintakepressure","alarmintaketemperature","alarmtubingpressure","alarmvfd","alarmlockout","alarmfluidlevel","runpermissive","startpermissive","last_vfd_fault_code","vfd_fault", "flowmeter_fault"]:
payload[measure["name"]] = convert_int(measure["name"], measure["value"])
payload[measure["name"]+ "_int"] = measure["value"]
payload["values"][measure["name"]] = convert_int(measure["name"], measure["value"])
payload["values"][measure["name"]+ "_int"] = measure["value"]
else:
payload[measure["name"]] = measure["value"]
payload["values"][measure["name"]] = measure["value"]
logger.debug("Sending on topic: {}".format(topic))
logger.debug("Sending value: {}".format(payload))
for chunk in chunk_payload(payload=payload):

View File

@@ -1,161 +0,0 @@
import json, os, time
from datetime import datetime as dt
from common.Logger import logger
from quickfaas.global_dict import get as get_params
from quickfaas.global_dict import _set_global_args
def reboot(reason="Rebooting for config file update"):
    """Restart the app by SIGHUP-ing the supervisord process; logs *reason* first."""
    banner = "!" * 10
    logger.info(banner + "REBOOTING DEVICE" + banner)
    logger.info(reason)
    result = os.popen("kill -s SIGHUP `cat /var/run/python/supervisord.pid`").read()
    logger.info(f"REBOOT : {result}")
def checkFileExist(filename):
    """Ensure /var/user/files exists and contains *filename*.

    A missing file is seeded with an empty JSON object ("{}") so later
    json.load calls succeed.  All failures are logged and swallowed —
    callers never see an exception.
    """
    path = "/var/user/files"
    filepath = os.path.join(path, filename)
    try:
        if not os.path.exists(path):
            logger.debug("no folder making files folder in var/user")
            os.makedirs(path)
            with open(filepath, "a") as f:
                json.dump({}, f)
    except Exception as e:
        logger.error(f"Something went wrong in checkFileExist while making folder: {e}")
    try:
        # Second check covers the case where the folder existed but the file did not.
        if not os.path.exists(filepath):
            logger.debug("no creds file making creds file")
            with open(filepath, "a") as f:
                json.dump({}, f)
    except Exception as e:
        # fixed typo in log message ("wihle" -> "while")
        logger.error(f"Something went wrong in checkFileExist while making file: {e}")
def convertDStoJSON(ds):
    """Convert a list of {"key": ..., "value": ...} records into a plain dict.

    On any error the exception is logged and the partial result is returned.
    """
    result = dict()
    try:
        for entry in ds:
            result[entry["key"]] = entry["value"]
    except Exception as e:
        logger.error(f"Something went wrong in convertDStoJSON: {e}")
    return result
def convertJSONtoDS(j):
    """Convert a dict into a list of {"key": ..., "value": ...} records.

    On any error the exception is logged and the partial result is returned.
    """
    records = []
    try:
        for key, value in j.items():
            records.append({"key": key, "value": value})
    except Exception as e:
        logger.error(f"Something went wrong in convertJSONtoDS: {e}")
    return records
def checkCredentialConfig():
    """Sync MQTT credentials between the supervisor config and creds.json.

    If the config still holds "unknown" placeholders, restore credentials from
    /var/user/files/creds.json, rewrite the config, and reboot.  Otherwise the
    config is treated as the source of truth and its credentials are written
    back to creds.json.  All failures are logged and swallowed.
    """
    logger.debug("CHECKING CONFIG")
    cfgpath = "/var/user/cfg/device_supervisor/device_supervisor.cfg"
    credspath = "/var/user/files/creds.json"
    cfg = dict()
    with open(cfgpath, "r") as f:
        try:
            cfg = json.load(f)
            clouds = cfg.get("clouds")
            logger.debug(clouds)
            # if not configured then try to configure from stored values
            if clouds[0]["args"]["clientId"] == "unknown" or clouds[0]["args"]["username"] == "unknown" or not clouds[0]["args"]["passwd"] or clouds[0]["args"]["passwd"] == "unknown":
                try:
                    checkFileExist("creds.json")
                except Exception as e:
                    logger.error(f"Error in checkFileExist: {e}")
                with open(credspath, "r") as c:
                    try:
                        creds = json.load(c)
                        if creds:
                            logger.debug("updating config with stored data")
                            # NOTE: stored file uses "userName"/"password" keys,
                            # the config uses "username"/"passwd".
                            clouds[0]["args"]["clientId"] = creds["clientId"]
                            clouds[0]["args"]["username"] = creds["userName"]
                            clouds[0]["args"]["passwd"] = creds["password"]
                            cfg["clouds"] = clouds
                            cfg = checkParameterConfig(cfg)
                            with open(cfgpath, "w", encoding='utf-8') as n:
                                json.dump(cfg, n, indent=1, ensure_ascii=False)
                            # restart so the supervisor picks up the rewritten config
                            reboot()
                    except Exception as e:
                        logger.error(f"Error trying to load credentials from file: {e}")
            else:
                # assuming clouds is filled out, if data is different then assume someone
                # typed in something new and store it, if creds is empty fill with clouds' data
                checkFileExist("creds.json")
                with open(credspath, "r") as c:
                    logger.debug("updating stored file with new data")
                    cfg = checkParameterConfig(cfg)
                    with open(cfgpath, "w", encoding='utf-8') as n:
                        json.dump(cfg, n, indent=1, ensure_ascii=False)
                    creds = json.load(c)
                    if creds:
                        if creds["clientId"] != clouds[0]["args"]["clientId"]:
                            creds["clientId"] = clouds[0]["args"]["clientId"]
                        if creds["userName"] != clouds[0]["args"]["username"]:
                            creds["userName"] = clouds[0]["args"]["username"]
                        if creds["password"] != clouds[0]["args"]["passwd"]:
                            creds["password"] = clouds[0]["args"]["passwd"]
                    else:
                        creds["clientId"] = clouds[0]["args"]["clientId"]
                        creds["userName"] = clouds[0]["args"]["username"]
                        creds["password"] = clouds[0]["args"]["passwd"]
                    with open(credspath, "w") as cw:
                        json.dump(creds,cw)
        except Exception as e:
            logger.error(f"Somethign went wrong in checkCredentialConfig: {e}")
def checkParameterConfig(cfg):
    """Reconcile cfg["labels"] with the persisted params file.

    Non-"unknown" values from the config win over stored values; stored keys
    missing from the config are kept.  Returns the (possibly updated) cfg.
    On any error the params file is deleted and cfg is returned unchanged.
    """
    try:
        logger.debug("Checking Parameters!!!!")
        paramspath = "/var/user/files/params.json"
        cfgparams = convertDStoJSON(cfg.get("labels"))
        # check stored values
        checkFileExist("params.json")
        with open(paramspath, "r") as f:
            logger.debug("Opened param storage file")
            params = json.load(f)
            if params:
                if cfgparams != params:
                    # go through each param
                    # if not "unknown" and cfg and params aren't the same take from cfg likely updated manually
                    # if key in cfg but not in params copy to params
                    logger.debug("equalizing params between cfg and stored")
                    for key in cfgparams.keys():
                        try:
                            if cfgparams[key] != params[key] and cfgparams[key] != "unknown":
                                params[key] = cfgparams[key]
                        except:
                            # key missing in stored params — copy it over
                            params[key] = cfgparams[key]
                cfg["labels"] = convertJSONtoDS(params)
                # push the merged params into the running global dict as well
                _set_global_args(convertJSONtoDS(params))
                with open(paramspath, "w") as p:
                    json.dump(params, p)
            else:
                with open(paramspath, "w") as p:
                    logger.debug("initializing param file with params in memory")
                    json.dump(convertDStoJSON(get_params()), p)
                cfg["labels"] = get_params()
        return cfg
    except Exception as e:
        logger.error(f"Something went wrong in checkParameterConfig: {e}")
        # delete the (possibly corrupt) params file so it is re-seeded next run
        os.system(f'rm {paramspath}')
        return cfg
# Helper generator: split a large telemetry payload into broker-sized pieces.
def chunk_payload(payload, chunk_size=20):
    """Yield copies of *payload* whose "values" dict holds at most *chunk_size* entries."""
    pairs = list(payload["values"].items())
    ts = payload["ts"]
    start = 0
    while start < len(pairs):
        yield {"ts": ts, "values": dict(pairs[start:start + chunk_size])}
        start += chunk_size
def sendData(message):
    """Publish the telemetry payload in chunks, then report latestReportTime.

    Timestamps are rounded to the nearest 10 minutes and expressed in ms.
    """
    # NOTE(review): payload["values"] is never populated in this version, so
    # chunk_payload yields nothing and only the attribute publish runs —
    # confirm this is intentional.
    payload = {"ts": (round(dt.timestamp(dt.now())/600)*600)*1000, "values": {}}
    for chunk in chunk_payload(payload=payload):
        publish(__topic__, json.dumps(chunk), __qos__)
        # pause between chunks, presumably to avoid flooding the broker — TODO confirm
        time.sleep(2)
    publish("v1/devices/me/attributes", json.dumps({"latestReportTime": (round(dt.timestamp(dt.now())/600)*600)*1000}), __qos__)

View File

@@ -150,6 +150,7 @@ def sendData(message,wizard_api):
#publish(__topic__, json.dumps(message), __qos__)
try:
checkCredentialConfig()
publish("v1/devices/me/attributes", json.dumps({"latestReportTime": (round(dt.timestamp(dt.now())/600)*600)*1000}), __qos__)
except Exception as e:
logger.error(f"Error in checkCredentialConfig: {e}")
try:

View File

@@ -199,6 +199,7 @@ def sendData(message,wizard_api):
#publish(__topic__, json.dumps(message), __qos__)
try:
checkCredentialConfig()
publish("v1/devices/me/attributes", json.dumps({"latestReportTime": (round(dt.timestamp(dt.now())/600)*600)*1000}), __qos__)
except Exception as e:
logger.error(f"Error in checkCredentialConfig: {e}")
try:

View File

@@ -1,99 +0,0 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [],
"source": [
"import convert_config\n",
"import os"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [],
"source": [
"root = \"/Users/nico/Documents/GitHub/HP_InHand_IG502/Pub_Sub\"\n",
"devicetype = \"hrvalvecontroller\"\n",
"platform = \"thingsboard\" #\"mistaway\"\n",
"platform_short = \"tb\" if platform == \"thingsboard\" else \"ma\" \n",
"startversion = 1\n",
"deviceconfig = devicetype + \"_\" + platform_short + \"_\" +\"v\" + str(startversion) + \".cfg\"\n"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"checking path exists: /Users/nico/Documents/GitHub/HP_InHand_IG502/Pub_Sub/hrvalvecontroller/thingsboard/v1/hrvalvecontroller_tb_v1.cfg\n",
"/Users/nico/Documents/GitHub/HP_InHand_IG502/Pub_Sub/hrvalvecontroller/thingsboard/v1\n",
"Write Code Path: /Users/nico/Documents/GitHub/HP_InHand_IG502/Pub_Sub/hrvalvecontroller/thingsboard/v1/pub/sendData.py\n",
"/Users/nico/Documents/GitHub/HP_InHand_IG502/Pub_Sub/hrvalvecontroller/thingsboard/v1/pub\n",
"Path didn't exist creating path: /Users/nico/Documents/GitHub/HP_InHand_IG502/Pub_Sub/hrvalvecontroller/thingsboard/v1/pub\n"
]
}
],
"source": [
"convert_config.write_code(root + \"/\" + devicetype + \"/\" + platform + \"/v\" + str(startversion) + \"/pub\" + \"/sendData.py\", convert_config.get_config_pub(root + \"/\" + devicetype + \"/\" + platform + \"/v\" + str(startversion) + \"/\" + deviceconfig))\n",
"#convert_config.write_code(root + devicetype + \"/\" + platform + \"/v\" + str(startversion) + \"/sub\" + \"/receiveCommand.py\", convert_config.get_config_sub(root + \"/\" + devicetype + \"/\" + platform + \"/v\" + str(startversion) + \"/\" + deviceconfig))\n"
]
},
{
"cell_type": "code",
"execution_count": 6,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"/Users/nico/Documents/GitHub/HP_InHand_IG502/Pub_Sub/plcfreshwater_advvfdipp/thingsboard/v1\n",
"/Users/nico/Documents/GitHub/HP_InHand_IG502/Pub_Sub/plcfreshwater_advvfdipp/thingsboard/v2\n",
"/Users/nico/Documents/GitHub/HP_InHand_IG502/Pub_Sub/plcfreshwater_advvfdipp/thingsboard/v2/pub\n"
]
}
],
"source": [
"convert_config.write_config(root + \"/\" + devicetype + \"/\" + platform + \"/v\" + str(startversion + 1) + \"/\" + devicetype + \"_\" + platform_short + \"_v\" + str(startversion + 1) + \".cfg\", \n",
" convert_config.get_config(root + \"/\" + devicetype + \"/\" + platform + \"/v\" + str(startversion) + \"/\" + devicetype + \"_\" + platform_short + \"_v\" + str(startversion) + \".cfg\"),\n",
" root + \"/\" + devicetype + \"/\" + platform + \"/v\" + str(startversion + 1) + \"/pub\" , \n",
" root + \"/\" + devicetype + \"/\" + platform + \"/v\" + str(startversion + 1) + \"/sub\")"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3.10.5 ('env-01')",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.10.5"
},
"orig_nbformat": 4,
"vscode": {
"interpreter": {
"hash": "661929b843193117f8407b47db3d9660d53447b05faf9ee8b39d0697e59e9e99"
}
}
},
"nbformat": 4,
"nbformat_minor": 2
}

View File

@@ -1,94 +0,0 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"abbflow_tcp_tb_v3.cfg\n"
]
}
],
"source": [
"import convert_config\n",
"import os\n",
"import re\n",
"\n",
"def get_latest_version(root, devicetype, platform):\n",
" platform_path = os.path.join(root, devicetype, platform)\n",
" if not os.path.exists(platform_path):\n",
" return 1\n",
" version_pattern = re.compile(r'v(\\d+)')\n",
" max_version = 0\n",
" for file in os.listdir(platform_path):\n",
" match = version_pattern.search(file)\n",
" if match:\n",
" version = int(match.group(1))\n",
" max_version = max(max_version, version)\n",
" return max_version + 1\n",
"\n",
"# Example usage\n",
"root = \"/Users/nico/Documents/GitHub/HP_InHand_IG502/Pub_Sub\"\n",
"devicetype = \"abbflow_tcp\"\n",
"platform = \"thingsboard\" # or \"mistaway\"\n",
"platform_short = \"tb\" if platform == \"thingsboard\" else \"ma\" \n",
"startversion = get_latest_version(root, devicetype, platform)\n",
"\n",
"deviceconfig = f\"{devicetype}_{platform_short}_v{startversion}.cfg\"\n",
"print(deviceconfig)\n",
"# Rest of the code from config_manager.ipynb\n",
"# This would include calls to convert_config.write_code and convert_config.write_config\n",
"\n"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"/Users/nico/Documents/GitHub/HP_InHand_IG502/Pub_Sub/abbflow_tcp/thingsboard\n",
"/Users/nico/Documents/GitHub/HP_InHand_IG502/Pub_Sub/abbflow_tcp/thingsboard\n",
"Path did not exist creating path: /Users/nico/Documents/GitHub/HP_InHand_IG502/Pub_Sub/abbflow_tcp/thingsboard/abbflow_tcp_tb_v3.cfg\n",
"/Users/nico/Documents/GitHub/HP_InHand_IG502/Pub_Sub/abbflow_tcp/thingsboard/pub\n",
"/Users/nico/Documents/GitHub/HP_InHand_IG502/Pub_Sub/abbflow_tcp/thingsboard/sub\n"
]
}
],
"source": [
"convert_config.write_config(f\"{root}/{devicetype}/{platform}/{devicetype}_{platform_short}_v{str(startversion)}.cfg\", \n",
" convert_config.get_config(f\"{root}/{devicetype}/{platform}/{devicetype}_{platform_short}_v{str(startversion - 1)}.cfg\"),\n",
" root + \"/\" + devicetype + \"/\" + platform + \"/pub\" , \n",
" root + \"/\" + devicetype + \"/\" + platform + \"/sub\")"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "tbDataCollector",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.10.5"
}
},
"nbformat": 4,
"nbformat_minor": 2
}

View File

@@ -1,80 +0,0 @@
from base64 import encode
import os
import json
def get_config(path):
    """Load and return the JSON config stored at *path* (creating it if absent)."""
    checkFileExist(path)
    with open(path, "r", encoding="utf-8") as cfg_file:
        raw = cfg_file.read()
    return json.loads(raw)
def get_config_pub(path):
    """Return the first upload-function script embedded in the config at *path*."""
    print("checking path exists: " + path)
    checkFileExist(path)
    with open(path, "r") as cfg_file:
        parsed = json.load(cfg_file)
    return parsed["quickfaas"]["uploadFuncs"][0]["script"]
def get_config_sub(path):
    """Return the first download-function script embedded in the config at *path*."""
    checkFileExist(path)
    with open(path, "r") as cfg_file:
        parsed = json.load(cfg_file)
    return parsed["quickfaas"]["downloadFuncs"][0]["script"]
def code_to_string(path):
    """Return the text content of *path*, or False if it cannot be read.

    The original used a bare ``except`` which also swallowed SystemExit and
    KeyboardInterrupt; narrowed to the errors a text-mode read can raise.
    """
    checkFileExist(path)
    try:
        with open(path, "r") as f:
            return f.read()
    except (OSError, UnicodeDecodeError):
        return False
def write_code(path, codestr):
    """Write *codestr* to *path*, creating the file and its folders first if needed."""
    print("Write Code Path: " + path)
    checkFileExist(path)
    with open(path, "w") as out_file:
        out_file.write(codestr)
def write_config(path, config, pubDir, subDir):
    """Inject function scripts from pubDir/subDir into *config* and write it to *path*.

    Each non-hidden file in pubDir fills the next slot of
    config["quickfaas"]["uploadFuncs"][n]["script"]; subDir does the same for
    downloadFuncs.  Cloud credentials are reset to placeholders before writing.
    """
    checkFileExist(path)
    with open(path, "w") as f:
        if pubDir:
            checkFolderExist(pubDir)
            with os.scandir(pubDir) as it:
                funcNum = 0
                for entry in it:
                    if not entry.name.startswith('.') and entry.is_file():
                        # NOTE(review): os.scandir order is arbitrary — which script
                        # lands in which uploadFuncs slot depends on filesystem
                        # order; confirm this is acceptable.
                        config["quickfaas"]["uploadFuncs"][funcNum]["script"] = code_to_string(entry.path)
                        funcNum += 1
        if subDir:
            checkFolderExist(subDir)
            with os.scandir(subDir) as it:
                funcNum = 0
                for entry in it:
                    if not entry.name.startswith('.') and entry.is_file():
                        config["quickfaas"]["downloadFuncs"][funcNum]["script"] = code_to_string(entry.path)
                        funcNum += 1
        # reset cloud credentials so the packaged config never ships real secrets
        config["clouds"][0]["args"]["host"] = "hp.henrypump.cloud"
        config["clouds"][0]["args"]["clientId"] = "unknown"
        config["clouds"][0]["args"]["username"] = "unknown"
        config["clouds"][0]["args"]["passwd"] = "unknown"
        json.dump(config, f, indent=4)
def checkFileExist(path):
    """Ensure *path* exists as a file, creating missing parent directories.

    Uses os.path.dirname instead of manual "/".join splitting, and guards the
    empty-parent case (a bare filename previously crashed os.makedirs("")).
    """
    parent = os.path.dirname(path)
    print(parent)
    if parent and not os.path.exists(parent):
        print("Path didn't exist creating path: " + parent)
        os.makedirs(parent)
        open(path, "a").close()
    if not os.path.exists(path):
        print("Path did not exist creating path: " + path)
        open(path, "a").close()
def checkFolderExist(path):
    """Create directory *path* (and any missing parents) if it does not exist."""
    if not os.path.exists(path):
        # fixed missing space in the progress message ("Making folder" + path)
        print("Making folder " + path)
        os.makedirs(path)

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,201 @@
# Enter your python code.
import json, time
from common.Logger import logger
from quickfaas.remotebus import publish
from quickfaas.measure import recall
def sendAlarm(message):
    """Publish a single alarm measurement immediately, then run a full sync."""
    logger.info(message)
    alarm_payload = {
        "ts": time.time() * 1000,
        "values": {message["measureName"]: message["value"]},
    }
    publish(__topic__, json.dumps(alarm_payload), __qos__)
    sync()
def sync():
    """Collect all current measure values via recall() and publish one payload.

    Enumerated/boolean tags are published twice: once as a display string
    (via convert_int) and once as "<name>_int" with the raw value.
    """
    payload = {"ts": time.time()*1000, "values": {}}
    # Default so a failed recall() does not leave `data` unbound below
    # (the original raised NameError on the logger.debug/for lines).
    data = []
    try:
        data = recall()
    except Exception as e:
        logger.error(e)
    logger.debug(data)
    # Tags whose integer readings map to display strings.
    converted_tags = {"wellstatus", "pidcontrolmode", "downholesensorstatus",
                      "alarmflowrate", "alarmintakepressure", "alarmintaketemperature",
                      "alarmtubingpressure", "alarmvfd", "alarmlockout",
                      "alarmfluidlevel", "runpermissive", "startpermissive",
                      "last_vfd_fault_code", "vfd_fault", "flowmeter_fault"}
    for controller in data:
        for measure in controller["measures"]:
            name = measure["name"]
            if name in converted_tags:
                payload["values"][name] = convert_int(name, measure["value"])
                payload["values"][name + "_int"] = measure["value"]
            else:
                payload["values"][name] = measure["value"]
    logger.debug("Sending on topic: {}".format(__topic__))
    logger.debug("Sending value: {}".format(payload))
    publish(__topic__, json.dumps(payload), 1)
def convert_int(plc_tag, value):
    """Translate a raw integer reading for *plc_tag* into its display string.

    Returns "Invalid Tag" for an unknown tag and "Invalid Code" for an
    unknown value of a known tag.  (The original built all 15 result strings
    eagerly on every call; this version looks up only the table it needs.)
    """
    well_status_codes = {0: "Running", 1: "Pumped Off", 2: "Alarmed",
                         3: "Locked Out", 4: "Stopped"}
    pid_control_codes = {0: "Flow", 1: "Fluid Level", 2: "Tubing Pressure", 3: "Manual"}
    downhole_codes = {0: "OK", 1: "Connecting", 2: "Open Circuit",
                      3: "Shorted", 4: "Cannot Decode"}
    permissive_codes = {0: "OK", 1: "Flow", 2: "Intake Pressure",
                        3: "Intake Temperature", 4: "Tubing Pressure",
                        5: "VFD", 6: "Fluid Level", 7: "Min. Downtime"}
    alarm_codes = {0: "OK", 1: "Alarm"}
    alarm_vfd_codes = {0: "OK", 1: "Locked Out"}
    vfd_fault_codes = {
        0: "No Fault", 2: "Auxiliary Input", 3: "Power Loss", 4: "UnderVoltage",
        5: "OverVoltage", 7: "Motor Overload", 8: "Heatsink OverTemp",
        9: "Thermister OverTemp", 10: "Dynamic Brake OverTemp",
        12: "Hardware OverCurrent", 13: "Ground Fault", 14: "Ground Warning",
        15: "Load Loss", 17: "Input Phase Loss", 18: "Motor PTC Trip",
        19: "Task Overrun", 20: "Torque Prove Speed Band", 21: "Output Phase Loss",
        24: "Decel Inhibit", 25: "OverSpeed Limit", 26: "Brake Slipped",
        27: "Torque Prove Conflict", 28: "TP Encls Confict", 29: "Analog In Loss",
        33: "Auto Restarts Exhausted", 35: "IPM OverCurrent", 36: "SW OverCurrent",
        38: "Phase U to Ground", 39: "Phase V to Ground", 40: "Phase W to Ground",
        41: "Phase UV Short", 42: "Phase VW Short", 43: "Phase WU Short",
        44: "Phase UNeg to Ground", 45: "Phase VNeg to Ground",
        46: "Phase WNeg to Ground", 48: "System Defaulted", 49: "Drive Powerup",
        51: "Clear Fault Queue", 55: "Control Board Overtemp", 59: "Invalid Code",
        61: "Shear Pin 1", 62: "Shear Pin 2", 64: "Drive Overload",
        66: "OW Torque Level", 67: "Pump Off", 71: "Port 1 Adapter",
        72: "Port 2 Adapter", 73: "Port 3 Adapter", 74: "Port 4 Adapter",
        75: "Port 5 Adapter", 76: "Port 6 Adapter", 77: "IR Volts Range",
        78: "FluxAmps Ref Range", 79: "Excessive Load", 80: "AutoTune Aborted",
        81: "Port 1 DPI Loss", 82: "Port 2 DPI Loss", 83: "Port 3 DPI Loss",
        84: "Port 4 DPI Loss", 85: "Port 5 DPI Loss", 86: "Port 6 DPI Loss",
        87: "IXo Voltage Range", 91: "Primary Velocity Feedback Loss",
        93: "Hardware Enable Check", 94: "Alternate Velocity Feedback Loss",
        95: "Auxiliary Velocity Feedback Loss", 96: "Position Feedback Loss",
        97: "Auto Tach Switch", 100: "Parameter Checksum",
        101: "Power Down NVS Blank", 102: "NVS Not Blank",
        103: "Power Down NVS Incompatible", 104: "Power Board Checksum",
        106: "Incompat MCB-PB", 107: "Replaced MCB-PB",
        108: "Analog Calibration Checksum", 110: "Invalid Power Board Data",
        111: "Power Board Invalid ID", 112: "Power Board App Min Version",
        113: "Tracking DataError", 115: "Power Down Table Full",
        116: "Power Down Entry Too Large", 117: "Power Down Data Checksum",
        118: "Power Board Power Down Checksum", 124: "App ID Changed",
        125: "Using Backup App", 134: "Start on Power Up",
        137: "External Precharge Error", 138: "Precharge Open",
        141: "Autotune Enc Angle", 142: "Autotune Speed Restricted",
        143: "Autotune Current Regulator", 144: "Autotune Inertia",
        145: "Autotune Travel", 13035: "Net IO Timeout", 13037: "Net IO Timeout",
    }
    # Dispatch: one code table per PLC tag.
    tag_tables = {
        "wellstatus": well_status_codes,
        "pidcontrolmode": pid_control_codes,
        "downholesensorstatus": downhole_codes,
        "alarmflowrate": alarm_codes,
        "alarmintakepressure": alarm_codes,
        "alarmintaketemperature": alarm_codes,
        "alarmtubingpressure": alarm_codes,
        "alarmvfd": alarm_codes,
        "alarmlockout": alarm_vfd_codes,
        "alarmfluidlevel": alarm_codes,
        "runpermissive": permissive_codes,
        "startpermissive": permissive_codes,
        "last_vfd_fault_code": vfd_fault_codes,
        "vfd_fault": vfd_fault_codes,
        "flowmeter_fault": alarm_codes,
    }
    table = tag_tables.get(plc_tag)
    if table is None:
        return "Invalid Tag"
    return table.get(value, "Invalid Code")

View File

@@ -0,0 +1,323 @@
# Enter your python code.
import json, os, time
from datetime import datetime as dt
from common.Logger import logger
from quickfaas.remotebus import publish
from quickfaas.global_dict import get as get_params
from quickfaas.global_dict import _set_global_args
def reboot(reason="Rebooting for config file update"):
    """Log *reason*, then restart the app by sending SIGHUP to supervisord."""
    logger.info("!" * 10 + "REBOOTING DEVICE" + "!" * 10)
    logger.info(reason)
    kill_output = os.popen("kill -s SIGHUP `cat /var/run/python/supervisord.pid`").read()
    logger.info(f"REBOOT : {kill_output}")
def checkFileExist(filename):
    """Ensure /var/user/files exists and contains *filename* (seeded with "{}").

    Unlike the Pub_Sub variant this version does not swallow exceptions —
    OS errors propagate to the caller.
    """
    path = "/var/user/files"
    if not os.path.exists(path):
        logger.debug("no folder making files folder in var/user")
        os.makedirs(path)
        with open(path + "/" + filename, "a") as f:
            json.dump({}, f)
    # folder existed (or was just created) but the file may still be missing
    if not os.path.exists(path + "/" + filename):
        logger.debug("no creds file making creds file")
        with open(path + "/" + filename, "a") as f:
            json.dump({}, f)
def convertDStoJSON(ds):
    """Convert a list of {"key": ..., "value": ...} records into a plain dict."""
    return {entry["key"]: entry["value"] for entry in ds}
def convertJSONtoDS(j):
    """Convert a dict into a list of {"key": ..., "value": ...} records."""
    return [{"key": key, "value": value} for key, value in j.items()]
def checkCredentialConfig():
    """Sync MQTT credentials between the supervisor config and creds.json.

    If the config holds "unknown" placeholders, restore credentials from
    /var/user/files/creds.json, rewrite the config, and reboot.  Otherwise the
    config wins and its credentials are written back to creds.json.  Unlike
    the older variant, errors here propagate to the caller.
    """
    logger.debug("CHECKING CONFIG")
    cfgpath = "/var/user/cfg/device_supervisor/device_supervisor.cfg"
    credspath = "/var/user/files/creds.json"
    cfg = dict()
    with open(cfgpath, "r") as f:
        cfg = json.load(f)
        clouds = cfg.get("clouds")
        logger.debug(clouds)
        # if not configured then try to configure from stored values
        if clouds[0]["args"]["clientId"] == "unknown" or clouds[0]["args"]["username"] == "unknown" or not clouds[0]["args"]["passwd"] or clouds[0]["args"]["passwd"] == "unknown":
            checkFileExist("creds.json")
            with open(credspath, "r") as c:
                creds = json.load(c)
                if creds:
                    logger.debug("updating config with stored data")
                    # NOTE: stored file uses "userName"/"password", config uses
                    # "username"/"passwd".
                    clouds[0]["args"]["clientId"] = creds["clientId"]
                    clouds[0]["args"]["username"] = creds["userName"]
                    clouds[0]["args"]["passwd"] = creds["password"]
                    cfg["clouds"] = clouds
                    cfg = checkParameterConfig(cfg)
                    with open(cfgpath, "w", encoding='utf-8') as n:
                        json.dump(cfg, n, indent=1, ensure_ascii=False)
                    # restart so the supervisor picks up the rewritten config
                    reboot()
        else:
            # assuming clouds is filled out, if data is different then assume someone
            # typed in something new and store it, if creds is empty fill with clouds' data
            checkFileExist("creds.json")
            with open(credspath, "r") as c:
                logger.debug("updating stored file with new data")
                cfg = checkParameterConfig(cfg)
                with open(cfgpath, "w", encoding='utf-8') as n:
                    json.dump(cfg, n, indent=1, ensure_ascii=False)
                creds = json.load(c)
                if creds:
                    if creds["clientId"] != clouds[0]["args"]["clientId"]:
                        creds["clientId"] = clouds[0]["args"]["clientId"]
                    if creds["userName"] != clouds[0]["args"]["username"]:
                        creds["userName"] = clouds[0]["args"]["username"]
                    if creds["password"] != clouds[0]["args"]["passwd"]:
                        creds["password"] = clouds[0]["args"]["passwd"]
                else:
                    creds["clientId"] = clouds[0]["args"]["clientId"]
                    creds["userName"] = clouds[0]["args"]["username"]
                    creds["password"] = clouds[0]["args"]["passwd"]
                with open(credspath, "w") as cw:
                    json.dump(creds,cw)
def checkParameterConfig(cfg):
    """Reconcile cfg["labels"] with the persisted params file and return cfg.

    Non-"unknown" values from the config win over stored values; stored keys
    missing from the config are kept.  Errors propagate to the caller.
    """
    logger.debug("Checking Parameters!!!!")
    paramspath = "/var/user/files/params.json"
    cfgparams = convertDStoJSON(cfg.get("labels"))
    # check stored values
    checkFileExist("params.json")
    with open(paramspath, "r") as f:
        logger.debug("Opened param storage file")
        params = json.load(f)
        if params:
            if cfgparams != params:
                # go through each param
                # if not "unknown" and cfg and params aren't the same take from cfg likely updated manually
                # if key in cfg but not in params copy to params
                logger.debug("equalizing params between cfg and stored")
                for key in cfgparams.keys():
                    try:
                        if cfgparams[key] != params[key] and cfgparams[key] != "unknown":
                            params[key] = cfgparams[key]
                    except:
                        # key missing in stored params — copy it over
                        params[key] = cfgparams[key]
            cfg["labels"] = convertJSONtoDS(params)
            # push the merged params into the running global dict as well
            _set_global_args(convertJSONtoDS(params))
            with open(paramspath, "w") as p:
                json.dump(params, p)
        else:
            with open(paramspath, "w") as p:
                logger.debug("initializing param file with params in memory")
                json.dump(convertDStoJSON(get_params()), p)
            cfg["labels"] = get_params()
    return cfg
# Helper generator: split a large telemetry payload into broker-sized pieces.
def chunk_payload(payload, chunk_size=20):
    """Yield copies of *payload* whose "values" dict holds at most *chunk_size* entries."""
    pairs = list(payload["values"].items())
    ts = payload["ts"]
    start = 0
    while start < len(pairs):
        yield {"ts": ts, "values": dict(pairs[start:start + chunk_size])}
        start += chunk_size
def sendData(message):
    """Build one telemetry payload from message["measures"] and publish it in chunks.

    Enumerated/boolean tags are published twice: as a display string (via
    convert_int) and as "<name>_int" with the raw value.  Timestamps are
    rounded to the nearest 10 minutes, in milliseconds.
    """
    #logger.debug(message)
    try:
        checkCredentialConfig()
    except Exception as e:
        logger.error(e)
    payload = {"ts": (round(dt.timestamp(dt.now())/600)*600)*1000, "values": {}}
    for measure in message["measures"]:
        try:
            logger.debug(measure)
            # Reboot if the poll timestamp drifted more than an hour from the clock.
            if abs(payload["ts"]/1000 - measure["timestamp"]) > 3600:
                reboot(reason="Poll timestamp and actual timestamp out of sync. Actual: {} Poll: {}".format(payload["ts"]/1000,measure["timestamp"]))
            if measure["name"] in ["wellstatus","pidcontrolmode","downholesensorstatus","alarmflowrate","alarmintakepressure","alarmintaketemperature","alarmtubingpressure","alarmvfd","alarmlockout","alarmfluidlevel","runpermissive","startpermissive","last_vfd_fault_code","vfd_fault", "flowmeter_fault"]:
                logger.debug("Converting DINT/BOOL to STRING")
                value = convert_int(measure["name"], measure["value"])
                logger.debug("Converted {} to {}".format(measure["value"], value))
                payload["values"][measure["name"]] = value
                payload["values"][measure["name"] + "_int"] = measure["value"]
            else:
                payload["values"][measure["name"]] = measure["value"]
        except Exception as e:
            logger.error(e)
    # Send in chunks; sleep between publishes, presumably to pace the broker — TODO confirm.
    for chunk in chunk_payload(payload=payload):
        publish(__topic__, json.dumps(chunk), __qos__)
        time.sleep(2)
    publish("v1/devices/me/attributes", json.dumps({"latestReportTime": (round(dt.timestamp(dt.now())/600)*600)*1000}), __qos__)
def convert_int(plc_tag, value):
    """Translate a raw integer reading for *plc_tag* into its display string.

    Returns "Invalid Tag" for an unknown tag and "Invalid Code" for an
    unknown value of a known tag.  (The original built all 15 result strings
    eagerly on every call; this version looks up only the table it needs.)
    """
    well_status_codes = {0: "Running", 1: "Pumped Off", 2: "Alarmed",
                         3: "Locked Out", 4: "Stopped"}
    pid_control_codes = {0: "Flow", 1: "Fluid Level", 2: "Tubing Pressure", 3: "Manual"}
    downhole_codes = {0: "OK", 1: "Connecting", 2: "Open Circuit",
                      3: "Shorted", 4: "Cannot Decode"}
    permissive_codes = {0: "OK", 1: "Flow", 2: "Intake Pressure",
                        3: "Intake Temperature", 4: "Tubing Pressure",
                        5: "VFD", 6: "Fluid Level", 7: "Min. Downtime"}
    alarm_codes = {0: "OK", 1: "Alarm"}
    alarm_vfd_codes = {0: "OK", 1: "Locked Out"}
    vfd_fault_codes = {
        0: "No Fault", 2: "Auxiliary Input", 3: "Power Loss", 4: "UnderVoltage",
        5: "OverVoltage", 7: "Motor Overload", 8: "Heatsink OverTemp",
        9: "Thermister OverTemp", 10: "Dynamic Brake OverTemp",
        12: "Hardware OverCurrent", 13: "Ground Fault", 14: "Ground Warning",
        15: "Load Loss", 17: "Input Phase Loss", 18: "Motor PTC Trip",
        19: "Task Overrun", 20: "Torque Prove Speed Band", 21: "Output Phase Loss",
        24: "Decel Inhibit", 25: "OverSpeed Limit", 26: "Brake Slipped",
        27: "Torque Prove Conflict", 28: "TP Encls Confict", 29: "Analog In Loss",
        33: "Auto Restarts Exhausted", 35: "IPM OverCurrent", 36: "SW OverCurrent",
        38: "Phase U to Ground", 39: "Phase V to Ground", 40: "Phase W to Ground",
        41: "Phase UV Short", 42: "Phase VW Short", 43: "Phase WU Short",
        44: "Phase UNeg to Ground", 45: "Phase VNeg to Ground",
        46: "Phase WNeg to Ground", 48: "System Defaulted", 49: "Drive Powerup",
        51: "Clear Fault Queue", 55: "Control Board Overtemp", 59: "Invalid Code",
        61: "Shear Pin 1", 62: "Shear Pin 2", 64: "Drive Overload",
        66: "OW Torque Level", 67: "Pump Off", 71: "Port 1 Adapter",
        72: "Port 2 Adapter", 73: "Port 3 Adapter", 74: "Port 4 Adapter",
        75: "Port 5 Adapter", 76: "Port 6 Adapter", 77: "IR Volts Range",
        78: "FluxAmps Ref Range", 79: "Excessive Load", 80: "AutoTune Aborted",
        81: "Port 1 DPI Loss", 82: "Port 2 DPI Loss", 83: "Port 3 DPI Loss",
        84: "Port 4 DPI Loss", 85: "Port 5 DPI Loss", 86: "Port 6 DPI Loss",
        87: "IXo Voltage Range", 91: "Primary Velocity Feedback Loss",
        93: "Hardware Enable Check", 94: "Alternate Velocity Feedback Loss",
        95: "Auxiliary Velocity Feedback Loss", 96: "Position Feedback Loss",
        97: "Auto Tach Switch", 100: "Parameter Checksum",
        101: "Power Down NVS Blank", 102: "NVS Not Blank",
        103: "Power Down NVS Incompatible", 104: "Power Board Checksum",
        106: "Incompat MCB-PB", 107: "Replaced MCB-PB",
        108: "Analog Calibration Checksum", 110: "Invalid Power Board Data",
        111: "Power Board Invalid ID", 112: "Power Board App Min Version",
        113: "Tracking DataError", 115: "Power Down Table Full",
        116: "Power Down Entry Too Large", 117: "Power Down Data Checksum",
        118: "Power Board Power Down Checksum", 124: "App ID Changed",
        125: "Using Backup App", 134: "Start on Power Up",
        137: "External Precharge Error", 138: "Precharge Open",
        141: "Autotune Enc Angle", 142: "Autotune Speed Restricted",
        143: "Autotune Current Regulator", 144: "Autotune Inertia",
        145: "Autotune Travel", 13035: "Net IO Timeout", 13037: "Net IO Timeout",
    }
    # Dispatch: one code table per PLC tag.
    tag_tables = {
        "wellstatus": well_status_codes,
        "pidcontrolmode": pid_control_codes,
        "downholesensorstatus": downhole_codes,
        "alarmflowrate": alarm_codes,
        "alarmintakepressure": alarm_codes,
        "alarmintaketemperature": alarm_codes,
        "alarmtubingpressure": alarm_codes,
        "alarmvfd": alarm_codes,
        "alarmlockout": alarm_vfd_codes,
        "alarmfluidlevel": alarm_codes,
        "runpermissive": permissive_codes,
        "startpermissive": permissive_codes,
        "last_vfd_fault_code": vfd_fault_codes,
        "vfd_fault": vfd_fault_codes,
        "flowmeter_fault": alarm_codes,
    }
    table = tag_tables.get(plc_tag)
    if table is None:
        return "Invalid Tag"
    return table.get(value, "Invalid Code")

View File

@@ -0,0 +1,278 @@
import json, time
from datetime import datetime as dt
from quickfaas.measure import recall, write
from quickfaas.remotebus import publish
from common.Logger import logger
# Helper that splits a telemetry payload into publishable chunks
def chunk_payload(payload, chunk_size=20):
    """Yield copies of *payload* whose "values" dict holds at most
    chunk_size entries each; the "ts" field is repeated on every chunk."""
    items = list(payload["values"].items())
    start = 0
    while start < len(items):
        yield {
            "ts": payload["ts"],
            "values": dict(items[start:start + chunk_size]),
        }
        start += chunk_size
def sync():
    """Read all current measure values via recall() and publish them.

    Enumerated tags are published twice: translated to a readable string
    via convert_int(), and raw under "<name>_int".  The payload is split
    with chunk_payload() and each chunk is published with QoS 1.
    """
    payload = {"ts": round(dt.timestamp(dt.now())) * 1000, "values": {}}
    topic = "v1/devices/me/telemetry"
    try:
        data = recall()
    except Exception as e:
        # Fix: the original fell through with `data` undefined and crashed
        # with a NameError below; with no data there is nothing to publish.
        logger.error(e)
        return
    logger.debug(data)
    # Tags whose integer values have a string translation in convert_int().
    enumerated = {"wellstatus", "pidcontrolmode", "downholesensorstatus",
                  "alarmflowrate", "alarmintakepressure", "alarmintaketemperature",
                  "alarmtubingpressure", "alarmvfd", "alarmlockout",
                  "alarmfluidlevel", "runpermissive", "startpermissive",
                  "last_vfd_fault_code", "vfd_fault", "flowmeter_fault"}
    for controller in data:
        for measure in controller["measures"]:
            name, value = measure["name"], measure["value"]
            if name in enumerated:
                payload["values"][name] = convert_int(name, value)
                payload["values"][name + "_int"] = value
            else:
                payload["values"][name] = value
    logger.debug("Sending on topic: {}".format(topic))
    logger.debug("Sending value: {}".format(payload))
    for chunk in chunk_payload(payload=payload):
        publish(topic, json.dumps(chunk), 1)
        time.sleep(2)
def writeplctag(value):
    """Write one measurement to the PLC.

    :param value: dict of the form {"measurement": <name>, "value": <value to write>}.
    :return: True when write() completed without raising, False otherwise.
    """
    try:
        logger.debug(value)
        # write() expects a controller list:
        # [{"name": "advvfdipp", "measures": [{"name": ..., "value": ...}]}]
        measure = {"name": value["measurement"], "value": value["value"]}
        resp = write([{"name": "advvfdipp", "measures": [measure]}])
        logger.debug("RETURN FROM WRITE: {}".format(resp))
        return True
    except Exception as e:
        logger.debug(e)
        return False
def receiveCommand(topic, payload):
    """Handle an incoming RPC message.

    Dispatches on the "method" field of the JSON payload:
      - "setPLCTag": writes p["params"] directly via writeplctag().
      - "changeSetpoint": first writes the control mode (setpointType),
        then writes the setpoint value to the measure matching the
        current mode.
    Any other method falls through to the common tail: ack the RPC,
    wait 5 s, then sync() the latest values back to the server.
    """
    try:
        logger.debug(topic)
        logger.debug(json.loads(payload))
        p = json.loads(payload)
        command = p["method"]
        # Dispatch table; "sync" is declared but only "writeplctag" is
        # looked up below — sync() is always called at the end instead.
        commands = {
            "sync": sync,
            "writeplctag": writeplctag,
        }
        if command == "setPLCTag":
            try:
                result = commands["writeplctag"](p["params"])
                logger.debug(result)
            except Exception as e:
                logger.error(e)
        elif command == "changeSetpoint":
            try:
                logger.debug("attempting controlpoint write")
                # Switch the PID control mode first so the setpoint below
                # lands on the right measure.
                params_type = {"measurement": "pidcontrolmode", "value": p["params"]["setpointType"]}
                # NOTE(review): a falsy setpointType (0 == "Flow") is
                # skipped here — confirm that is intended.
                if params_type["value"]:
                    commands["writeplctag"](params_type)
                    time.sleep(2)
            except Exception as e:
                logger.error("DID NOT WRITE CONTROL MODE")
                logger.error(e)
            try:
                logger.debug("attempting setpoint write")
                # Mode index -> the measure that holds that mode's setpoint.
                modes = {0: "flowsetpoint", 1: "fluidlevelsetpoint", 2: "tubingpressuresetpoint", 3: "manualfrequencysetpoint"}
                params_value = {"value": p["params"]["setpointValue"]}
                # NOTE(review): a falsy setpointValue (0) is also skipped.
                if params_value["value"]:
                    params_value["measurement"] = modes[getMode()]
                    result = commands["writeplctag"](params_value)
                    logger.debug(result)
            except Exception as e:
                logger.error("DID NOT WRITE SETPOINT")
                logger.error(e)
        #logger.debug(command)
        # The request id is the last topic segment (v1/devices/me/rpc/request/<id>).
        ack(topic.split("/")[-1])
        time.sleep(5)
        sync()
    except Exception as e:
        logger.debug(e)
def ack(msgid):
    """Publish an empty RPC response so the server knows *msgid* was handled."""
    response_topic = "v1/devices/me/rpc/response/" + str(msgid)
    body = {"msg": {"time": time.time()}, "metadata": "", "msgType": ""}
    publish(response_topic, json.dumps(body), 1)
def getMode():
    """Return the current value of the "pidcontrolmode" measure.

    :return: The measure value, or None when recall() fails or the
             measure is not present in any controller.
    """
    try:
        data = recall()
        for controller in data:
            for measure in controller["measures"]:
                if measure["name"] == "pidcontrolmode":
                    return measure["value"]
    except Exception as e:
        # Fix: was a bare `except:` that silently swallowed everything,
        # including KeyboardInterrupt/SystemExit. Narrow and log instead.
        logger.error(e)
    return None
def convert_int(plc_tag, value):
    """Translate an integer PLC code into its human-readable string.

    :param plc_tag: Measure name, e.g. "wellstatus" or "vfd_fault".
    :param value: Integer code read from the PLC for that measure.
    :return: The matching description, "Invalid Code" when the code is not
             in the tag's table, or "Invalid Tag" for an unknown tag.
    """
    well_status_codes = {
        0: "Running",
        1: "Pumped Off",
        2: "Alarmed",
        3: "Locked Out",
        4: "Stopped"
    }
    pid_control_codes = {
        0: "Flow",
        1: "Fluid Level",
        2: "Tubing Pressure",
        3: "Manual"
    }
    downhole_codes = {
        0: "OK",
        1: "Connecting",
        2: "Open Circuit",
        3: "Shorted",
        4: "Cannot Decode"
    }
    permissive_codes = {
        0: "OK",
        1: "Flow",
        2: "Intake Pressure",
        3: "Intake Temperature",
        4: "Tubing Pressure",
        5: "VFD",
        6: "Fluid Level",
        7: "Min. Downtime"
    }
    alarm_codes = {
        0: "OK",
        1: "Alarm"
    }
    alarm_vfd_codes = {
        0: "OK",
        1: "Locked Out"
    }
    vfd_fault_codes = {
        0: "No Fault",
        2: "Auxiliary Input",
        3: "Power Loss",
        4: "UnderVoltage",
        5: "OverVoltage",
        7: "Motor Overload",
        8: "Heatsink OverTemp",
        9: "Thermister OverTemp",
        10: "Dynamic Brake OverTemp",
        12: "Hardware OverCurrent",
        13: "Ground Fault",
        14: "Ground Warning",
        15: "Load Loss",
        17: "Input Phase Loss",
        18: "Motor PTC Trip",
        19: "Task Overrun",
        20: "Torque Prove Speed Band",
        21: "Output Phase Loss",
        24: "Decel Inhibit",
        25: "OverSpeed Limit",
        26: "Brake Slipped",
        27: "Torque Prove Conflict",
        28: "TP Encls Confict",
        29: "Analog In Loss",
        33: "Auto Restarts Exhausted",
        35: "IPM OverCurrent",
        36: "SW OverCurrent",
        38: "Phase U to Ground",
        39: "Phase V to Ground",
        40: "Phase W to Ground",
        41: "Phase UV Short",
        42: "Phase VW Short",
        43: "Phase WU Short",
        44: "Phase UNeg to Ground",
        45: "Phase VNeg to Ground",
        46: "Phase WNeg to Ground",
        48: "System Defaulted",
        49: "Drive Powerup",
        51: "Clear Fault Queue",
        55: "Control Board Overtemp",
        59: "Invalid Code",
        61: "Shear Pin 1",
        62: "Shear Pin 2",
        64: "Drive Overload",
        66: "OW Torque Level",
        67: "Pump Off",
        71: "Port 1 Adapter",
        72: "Port 2 Adapter",
        73: "Port 3 Adapter",
        74: "Port 4 Adapter",
        75: "Port 5 Adapter",
        76: "Port 6 Adapter",
        77: "IR Volts Range",
        78: "FluxAmps Ref Range",
        79: "Excessive Load",
        80: "AutoTune Aborted",
        81: "Port 1 DPI Loss",
        82: "Port 2 DPI Loss",
        83: "Port 3 DPI Loss",
        84: "Port 4 DPI Loss",
        85: "Port 5 DPI Loss",
        86: "Port 6 DPI Loss",
        87: "IXo Voltage Range",
        91: "Primary Velocity Feedback Loss",
        93: "Hardware Enable Check",
        94: "Alternate Velocity Feedback Loss",
        95: "Auxiliary Velocity Feedback Loss",
        96: "Position Feedback Loss",
        97: "Auto Tach Switch",
        100: "Parameter Checksum",
        101: "Power Down NVS Blank",
        102: "NVS Not Blank",
        103: "Power Down NVS Incompatible",
        104: "Power Board Checksum",
        106: "Incompat MCB-PB",
        107: "Replaced MCB-PB",
        108: "Analog Calibration Checksum",
        110: "Invalid Power Board Data",
        111: "Power Board Invalid ID",
        112: "Power Board App Min Version",
        113: "Tracking DataError",
        115: "Power Down Table Full",
        116: "Power Down Entry Too Large",
        117: "Power Down Data Checksum",
        118: "Power Board Power Down Checksum",
        124: "App ID Changed",
        125: "Using Backup App",
        134: "Start on Power Up",
        137: "External Precharge Error",
        138: "Precharge Open",
        141: "Autotune Enc Angle",
        142: "Autotune Speed Restricted",
        143: "Autotune Current Regulator",
        144: "Autotune Inertia",
        145: "Autotune Travel",
        13035: "Net IO Timeout",
        13037: "Net IO Timeout"
    }
    # Map each tag to its code table and do a single lazy lookup; the
    # original eagerly computed all 15 translations on every call.
    code_tables = {
        "wellstatus": well_status_codes,
        "pidcontrolmode": pid_control_codes,
        "downholesensorstatus": downhole_codes,
        "alarmflowrate": alarm_codes,
        "alarmintakepressure": alarm_codes,
        "alarmintaketemperature": alarm_codes,
        "alarmtubingpressure": alarm_codes,
        "alarmvfd": alarm_codes,
        "alarmlockout": alarm_vfd_codes,
        "alarmfluidlevel": alarm_codes,
        "runpermissive": permissive_codes,
        "startpermissive": permissive_codes,
        "last_vfd_fault_code": vfd_fault_codes,
        "vfd_fault": vfd_fault_codes,
        "flowmeter_fault": alarm_codes
    }
    table = code_tables.get(plc_tag)
    if table is None:
        return "Invalid Tag"
    return table.get(value, "Invalid Code")

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -1,504 +0,0 @@
{
"controllers": [
{
"protocol": "EtherNet/IP",
"name": "plcpond",
"args": {},
"samplePeriod": 10,
"expired": 10000,
"endpoint": "192.168.1.12:44818"
}
],
"measures": [
{
"name": "pond_1_level",
"ctrlName": "plcpond",
"group": "default",
"uploadType": "periodic",
"dataType": "FLOAT",
"addr": "Pond_1_Lev",
"decimal": 2,
"len": 1,
"readWrite": "ro",
"unit": "",
"desc": "",
"transformType": 0,
"maxValue": "",
"minValue": "",
"maxScaleValue": "",
"minScaleValue": "",
"gain": "",
"offset": ""
},
{
"name": "pond_1_total_bbls",
"ctrlName": "plcpond",
"group": "default",
"uploadType": "periodic",
"dataType": "FLOAT",
"addr": "Pond_1_Total_Barrels",
"decimal": 2,
"len": 1,
"readWrite": "ro",
"unit": "",
"desc": "",
"transformType": 0,
"maxValue": "",
"minValue": "",
"maxScaleValue": "",
"minScaleValue": "",
"gain": "",
"offset": ""
},
{
"name": "pond_1_hi_alm",
"ctrlName": "plcpond",
"group": "default",
"uploadType": "periodic",
"dataType": "BIT",
"addr": "Pond_1_Hi_Alarm",
"decimal": 2,
"len": 1,
"readWrite": "ro",
"unit": "",
"desc": "",
"transformType": 0,
"maxValue": "",
"minValue": "",
"maxScaleValue": "",
"minScaleValue": "",
"gain": "",
"offset": ""
},
{
"name": "pond_1_hi_spt",
"ctrlName": "plcpond",
"group": "default",
"uploadType": "periodic",
"dataType": "FLOAT",
"addr": "Pond_1_Hi_Setpoint",
"decimal": 2,
"len": 1,
"readWrite": "rw",
"unit": "",
"desc": "",
"transformType": 0,
"maxValue": "",
"minValue": "",
"maxScaleValue": "",
"minScaleValue": "",
"gain": "",
"offset": ""
},
{
"name": "pond_1_hi_clr_spt",
"ctrlName": "plcpond",
"group": "default",
"uploadType": "periodic",
"dataType": "FLOAT",
"addr": "Pond_1_Hi_Clr_Setpoint",
"decimal": 2,
"len": 1,
"readWrite": "rw",
"unit": "",
"desc": "",
"transformType": 0,
"maxValue": "",
"minValue": "",
"maxScaleValue": "",
"minScaleValue": "",
"gain": "",
"offset": ""
},
{
"name": "pond_1_lo_alm",
"ctrlName": "plcpond",
"group": "default",
"uploadType": "periodic",
"dataType": "BIT",
"addr": "Pond_1_Lo_Alarm",
"decimal": 2,
"len": 1,
"readWrite": "ro",
"unit": "",
"desc": "",
"transformType": 0,
"maxValue": "",
"minValue": "",
"maxScaleValue": "",
"minScaleValue": "",
"gain": "",
"offset": ""
},
{
"name": "pond_1_lo_spt",
"ctrlName": "plcpond",
"group": "default",
"uploadType": "periodic",
"dataType": "FLOAT",
"addr": "Pond_1_Lo_Setpoint",
"decimal": 2,
"len": 1,
"readWrite": "rw",
"unit": "",
"desc": "",
"transformType": 0,
"maxValue": "",
"minValue": "",
"maxScaleValue": "",
"minScaleValue": "",
"gain": "",
"offset": ""
},
{
"name": "pond_1_lo_clr_spt",
"ctrlName": "plcpond",
"group": "default",
"uploadType": "periodic",
"dataType": "FLOAT",
"addr": "Pond_1_Lo_Clr_Setpoint",
"decimal": 2,
"len": 1,
"readWrite": "rw",
"unit": "",
"desc": "",
"transformType": 0,
"maxValue": "",
"minValue": "",
"maxScaleValue": "",
"minScaleValue": "",
"gain": "",
"offset": ""
},
{
"name": "pond_2_level",
"ctrlName": "plcpond",
"group": "default",
"uploadType": "periodic",
"dataType": "FLOAT",
"addr": "Pond_2_Lev",
"decimal": 2,
"len": 1,
"readWrite": "ro",
"unit": "",
"desc": "",
"transformType": 0,
"maxValue": "",
"minValue": "",
"maxScaleValue": "",
"minScaleValue": "",
"gain": "",
"offset": ""
},
{
"name": "pond_2_total_bbls",
"ctrlName": "plcpond",
"group": "default",
"uploadType": "periodic",
"dataType": "FLOAT",
"addr": "Pond_2_Total_Barrels",
"decimal": 2,
"len": 1,
"readWrite": "ro",
"unit": "",
"desc": "",
"transformType": 0,
"maxValue": "",
"minValue": "",
"maxScaleValue": "",
"minScaleValue": "",
"gain": "",
"offset": ""
},
{
"name": "pond_2_hi_alm",
"ctrlName": "plcpond",
"group": "default",
"uploadType": "periodic",
"dataType": "BIT",
"addr": "Pond_2_Hi_Alarm",
"decimal": 2,
"len": 1,
"readWrite": "ro",
"unit": "",
"desc": "",
"transformType": 0,
"maxValue": "",
"minValue": "",
"maxScaleValue": "",
"minScaleValue": "",
"gain": "",
"offset": ""
},
{
"name": "pond_2_hi_spt",
"ctrlName": "plcpond",
"group": "default",
"uploadType": "periodic",
"dataType": "FLOAT",
"addr": "Pond_2_Hi_Setpoint",
"decimal": 2,
"len": 1,
"readWrite": "rw",
"unit": "",
"desc": "",
"transformType": 0,
"maxValue": "",
"minValue": "",
"maxScaleValue": "",
"minScaleValue": "",
"gain": "",
"offset": ""
},
{
"name": "pond_2_hi_clr_spt",
"ctrlName": "plcpond",
"group": "default",
"uploadType": "periodic",
"dataType": "FLOAT",
"addr": "Pond_2_Hi_Clr_Setpoint",
"decimal": 2,
"len": 1,
"readWrite": "rw",
"unit": "",
"desc": "",
"transformType": 0,
"maxValue": "",
"minValue": "",
"maxScaleValue": "",
"minScaleValue": "",
"gain": "1.0",
"offset": "0.0"
},
{
"name": "pond_2_lo_alm",
"ctrlName": "plcpond",
"group": "default",
"uploadType": "periodic",
"dataType": "BIT",
"addr": "Pond_2_Lo_Alarm",
"decimal": 2,
"len": 1,
"readWrite": "ro",
"unit": "",
"desc": "",
"transformType": 0,
"maxValue": "",
"minValue": "",
"maxScaleValue": "",
"minScaleValue": "",
"gain": "",
"offset": ""
},
{
"name": "pond_2_lo_spt",
"ctrlName": "plcpond",
"group": "default",
"uploadType": "periodic",
"dataType": "FLOAT",
"addr": "Pond_2_Lo_Setpoint",
"decimal": 2,
"len": 1,
"readWrite": "rw",
"unit": "",
"desc": "",
"transformType": 0,
"maxValue": "",
"minValue": "",
"maxScaleValue": "",
"minScaleValue": "",
"gain": "",
"offset": ""
},
{
"name": "pond_2_lo_clr_spt",
"ctrlName": "plcpond",
"group": "default",
"uploadType": "periodic",
"dataType": "FLOAT",
"addr": "Pond_2_Lo_Clr_Setpoint",
"decimal": 2,
"len": 1,
"readWrite": "rw",
"unit": "",
"desc": "",
"transformType": 0,
"maxValue": "",
"minValue": "",
"maxScaleValue": "",
"minScaleValue": "",
"gain": "",
"offset": ""
}
],
"alarmLables": [
"default"
],
"alarms": [],
"groups": [
{
"name": "default",
"uploadInterval": 600,
"reference": 16
}
],
"misc": {
"maxAlarmRecordSz": 2000,
"logLvl": "INFO",
"coms": [
{
"name": "rs232",
"baud": 9600,
"bits": 8,
"stopbits": 1,
"parityChk": "n"
},
{
"name": "rs485",
"baud": 9600,
"bits": 8,
"stopbits": 1,
"parityChk": "n"
}
]
},
"clouds": [
{
"cacheSize": 100,
"enable": 1,
"name": "default",
"type": "Standard MQTT",
"args": {
"host": "hp.henrypump.cloud",
"port": 1883,
"clientId": "hp",
"auth": 1,
"tls": 0,
"cleanSession": 0,
"mqttVersion": "v3.1.1",
"keepalive": 60,
"key": "",
"cert": "",
"rootCA": "",
"verifyServer": 0,
"verifyClient": 0,
"username": "hp",
"passwd": "hp",
"authType": 1
}
}
],
"quickfaas": {
"genericFuncs": [],
"uploadFuncs": [
{
"name": "sendData",
"trigger": "measure_event",
"topic": "v1/devices/me/telemetry",
"qos": 1,
"groups": [
"default"
],
"funcName": "sendData",
"script": "# Enter your python code.\nimport json\nimport time\nfrom common.Logger import logger\nfrom quickfaas.remotebus import publish\n\n\ndef sendData(message):\n payload = {}\n payload[\"ts\"] = round(time.time() * 1000)\n payload[\"values\"] = {}\n for measure in message[\"measures\"]:\n try:\n logger.debug(measure)\n payload[\"values\"][measure[\"name\"]] = measure[\"value\"]\n except Exception as e:\n logger.error(e)\n publish(__topic__, json.dumps(payload), __qos__)",
"msgType": 0,
"cloudName": "default"
}
],
"downloadFuncs": []
},
"labels": [
{
"key": "SN",
"value": "GF5022223016120"
},
{
"key": "MAC",
"value": "00:18:05:21:b2:8a"
}
],
"modbusSlave": {
"enable": 0,
"protocol": "Modbus-TCP",
"port": 502,
"slaveAddr": 1,
"int16Ord": "ab",
"int32Ord": "abcd",
"float32Ord": "abcd",
"maxConnection": 5,
"mapping_table": []
},
"modbusRTUSlave": {
"enable": 0,
"protocol": "Modbus-RTU",
"coms": "rs485",
"slaveAddr": 1,
"int16Ord": "ab",
"int32Ord": "abcd",
"float32Ord": "abcd",
"mapping_table": []
},
"iec104Server": {
"enable": 0,
"cotSize": 2,
"port": 2404,
"serverList": [
{
"asduAddr": 1
}
],
"kValue": 12,
"wValue": 8,
"t0": 30,
"t1": 15,
"t2": 10,
"t3": 20,
"maximumLink": 5,
"timeSet": 1,
"byteOrder": "abcd",
"mapping_table": []
},
"iec104Client": {
"enable": 0,
"connectType": 2,
"serverAddr": "ipower.inhandcloud.cn",
"serverPort": 2404,
"communicationCode": "",
"protocol": 1,
"asduAddr": 1,
"tls": 0,
"mapping_table": {
"YX": [],
"YC": [],
"YK": []
}
},
"opcuaServer": {
"enable": 0,
"port": 4840,
"maximumLink": 5,
"securityMode": 0,
"identifierType": "String",
"mapping_table": []
},
"southMetadata": {},
"bindMetadata": {
"version": "",
"timestamp": ""
},
"bindConfig": {
"enable": 0,
"bind": {
"modelId": "",
"modelName": "",
"srcId": "",
"srcName": "",
"devId": "",
"devName": ""
},
"varGroups": [],
"variables": [],
"alerts": []
},
"version": "2.3.1"
}

View File

@@ -0,0 +1,33 @@
{
"timestamp": 1702477801,
"group_name": "default",
"values": {
"flowmeter": {
"totalizer_3_unit": {
"raw_data": 11,
"timestamp": 1702477799,
"status": 1
},
"totalizer_3": {
"raw_data": 8746.4500000000007,
"timestamp": 1702477799,
"status": 1
},
"totalizer_2": {
"raw_data": 8746.4400000000005,
"timestamp": 1702477799,
"status": 1
},
"totalizer_1": {
"raw_data": 8746.4400000000005,
"timestamp": 1702477799,
"status": 1
},
"flowrate": {
"raw_data": 10.0,
"timestamp": 1702477799,
"status": 1
}
}
}
}

View File

@@ -211,6 +211,8 @@ def sendData(message,wizard_api):
publish(__topic__, json.dumps(chunk), __qos__)
time.sleep(2)
publish("v1/devices/me/attributes", json.dumps({"latestReportTime": (round(dt.timestamp(dt.now())/600)*600)*1000}), __qos__)
if dayReset:
resetPayload["values"]["yesterday_volume"] = payload["values"]["day_volume"]
resetPayload["values"]["day_volume"] = 0

View File

@@ -0,0 +1,405 @@
import json, os, time, shutil
from datetime import datetime as dt
from common.Logger import logger
from quickfaas.remotebus import publish
from mobiuspi_lib.gps import GPS
from quickfaas.global_dict import get as get_params
from quickfaas.global_dict import _set_global_args
def reboot(reason="Rebooting for config file update"):
    """Restart the supervisor by sending SIGHUP to the supervisord process.

    :param reason: Free-text explanation written to the log before restarting.
    """
    banner = "!" * 10
    logger.info(banner + "REBOOTING DEVICE" + banner)
    logger.info(reason)
    result = os.popen("kill -s SIGHUP `cat /var/run/python/supervisord.pid`").read()
    logger.info(f"REBOOT : {result}")
def checkFileExist(filename):
    """Ensure /var/user/files/<filename> exists, seeding it with an empty
    JSON object ({}) when the folder or the file has to be created."""
    path = "/var/user/files"
    target = path + "/" + filename
    try:
        if not os.path.exists(path):
            logger.debug("no folder making files folder in var/user")
            os.makedirs(path)
            with open(target, "a") as f:
                json.dump({}, f)
    except Exception as e:
        logger.error(f"Something went wrong in checkFileExist while making folder: {e}")
    try:
        if not os.path.exists(target):
            logger.debug("no creds file making creds file")
            with open(target, "a") as f:
                json.dump({}, f)
    except Exception as e:
        logger.error(f"Something went wrong in checkFileExist wihle making file: {e}")
def convertDStoJSON(ds):
    """Convert a [{"key": k, "value": v}, ...] list into a plain {k: v} dict.

    On error the partially built dict is returned, matching the original
    best-effort behaviour.
    """
    result = dict()
    try:
        for entry in ds:
            result[entry["key"]] = entry["value"]
    except Exception as e:
        logger.error(f"Something went wrong in convertDStoJSON: {e}")
    return result
def convertJSONtoDS(j):
    """Convert a plain {k: v} dict into a [{"key": k, "value": v}, ...] list.

    On error the partially built list is returned, matching the original
    best-effort behaviour.
    """
    pairs = []
    try:
        for key, val in j.items():
            pairs.append({"key": key, "value": val})
    except Exception as e:
        logger.error(f"Something went wrong in convertJSONtoDS: {e}")
    return pairs
def checkCredentialConfig():
    """Keep MQTT credentials in sync between the device-supervisor config
    and the backup file /var/user/files/creds.json.

    Two directions:
      - Config unset ("unknown" / empty password): restore credentials
        from creds.json, rewrite the config, and reboot the supervisor.
      - Config set: treat it as authoritative and copy its credentials
        into creds.json (creating/updating the file as needed).
    In both directions checkParameterConfig() is run on the config first.
    """
    logger.debug("CHECKING CONFIG")
    cfgpath = "/var/user/cfg/device_supervisor/device_supervisor.cfg"
    credspath = "/var/user/files/creds.json"
    cfg = dict()
    with open(cfgpath, "r") as f:
        try:
            cfg = json.load(f)
            # Only the first cloud entry is ever consulted.
            clouds = cfg.get("clouds")
            logger.debug(clouds)
            #if not configured then try to configure from stored values
            if clouds[0]["args"]["clientId"] == "unknown" or clouds[0]["args"]["username"] == "unknown" or not clouds[0]["args"]["passwd"] or clouds[0]["args"]["passwd"] == "unknown":
                try:
                    checkFileExist("creds.json")
                except Exception as e:
                    logger.error(f"Error in checkFileExist: {e}")
                with open(credspath, "r") as c:
                    try:
                        creds = json.load(c)
                        # An empty creds file ({}) means there is nothing
                        # to restore, so no rewrite/reboot happens.
                        if creds:
                            logger.debug("updating config with stored data")
                            clouds[0]["args"]["clientId"] = creds["clientId"]
                            clouds[0]["args"]["username"] = creds["userName"]
                            clouds[0]["args"]["passwd"] = creds["password"]
                            cfg["clouds"] = clouds
                            cfg = checkParameterConfig(cfg)
                            with open(cfgpath, "w", encoding='utf-8') as n:
                                json.dump(cfg, n, indent=1, ensure_ascii=False)
                            # Reboot so the supervisor picks up the new config.
                            reboot()
                    except Exception as e:
                        logger.error(f"Error trying to load credentials from file: {e}")
            else:
                #assuming clouds is filled out, if data is different then assume someone typed in something new and store it, if creds is empty fill with clouds' data
                checkFileExist("creds.json")
                with open(credspath, "r") as c:
                    logger.debug("updating stored file with new data")
                    cfg = checkParameterConfig(cfg)
                    with open(cfgpath, "w", encoding='utf-8') as n:
                        json.dump(cfg, n, indent=1, ensure_ascii=False)
                    creds = json.load(c)
                    if creds:
                        # Mirror any manually-changed credential into the backup.
                        if creds["clientId"] != clouds[0]["args"]["clientId"]:
                            creds["clientId"] = clouds[0]["args"]["clientId"]
                        if creds["userName"] != clouds[0]["args"]["username"]:
                            creds["userName"] = clouds[0]["args"]["username"]
                        if creds["password"] != clouds[0]["args"]["passwd"]:
                            creds["password"] = clouds[0]["args"]["passwd"]
                    else:
                        # First run: seed the backup from the live config.
                        creds["clientId"] = clouds[0]["args"]["clientId"]
                        creds["userName"] = clouds[0]["args"]["username"]
                        creds["password"] = clouds[0]["args"]["passwd"]
                    with open(credspath, "w") as cw:
                        json.dump(creds,cw)
        except Exception as e:
            logger.error(f"Somethign went wrong in checkCredentialConfig: {e}")
def checkParameterConfig(cfg):
    """Reconcile the config's "labels" parameters with the backup file
    /var/user/files/params.json.

    Differences are resolved per key: a config value wins unless it is
    "unknown"; keys missing from the backup are copied in. The merged set
    is written back to both the backup file and the in-memory globals.
    On any error the (possibly corrupt) params file is deleted so it is
    re-created on the next run.

    :param cfg: Parsed device-supervisor config dict.
    :return: The (possibly updated) config dict.
    """
    try:
        logger.debug("Checking Parameters!!!!")
        paramspath = "/var/user/files/params.json"
        # labels are stored as [{"key":..., "value":...}]; work on a dict copy.
        cfgparams = convertDStoJSON(cfg.get("labels"))
        #check stored values
        checkFileExist("params.json")
        with open(paramspath, "r") as f:
            logger.debug("Opened param storage file")
            params = json.load(f)
            if params:
                if cfgparams != params:
                    #go through each param
                    #if not "unknown" and cfg and params aren't the same take from cfg likely updated manually
                    #if key in cfg but not in params copy to params
                    logger.debug("equalizing params between cfg and stored")
                    for key in cfgparams.keys():
                        try:
                            if cfgparams[key] != params[key] and cfgparams[key] != "unknown":
                                params[key] = cfgparams[key]
                        except:
                            # key missing from the stored params: copy it over.
                            params[key] = cfgparams[key]
                    cfg["labels"] = convertJSONtoDS(params)
                    # Push merged params into the runtime global dict too.
                    _set_global_args(convertJSONtoDS(params))
                    with open(paramspath, "w") as p:
                        json.dump(params, p)
            else:
                with open(paramspath, "w") as p:
                    logger.debug("initializing param file with params in memory")
                    json.dump(convertDStoJSON(get_params()), p)
                cfg["labels"] = get_params()
        return cfg
    except Exception as e:
        logger.error(f"Something went wrong in checkParameterConfig: {e}")
        # Drop the params file so a fresh one is created next time.
        os.system(f'rm {paramspath}')
        return cfg
# NOTE(review): module-level placeholder; sendData() builds its own local
# payload, so this global appears unused here — confirm before removing.
payload = {}
def initialize_totalizers():
    """Return a fresh totalizer record with every counter and holding at zero."""
    keys = ("day", "week", "month", "year", "lifetime",
            "dayHolding", "weekHolding", "monthHolding", "yearHolding")
    return dict.fromkeys(keys, 0)
def getTotalizers(file_path="/var/user/files/totalizers.json"):
    """
    Retrieves totalizer data from a JSON file.

    Falls back to a zeroed record when the file is missing, empty, or not
    a dict.  A corrupt (undecodable) file is backed up with a timestamped
    name before being replaced.

    :param file_path: Path to the JSON file storing totalizer data.
    :return: Dictionary containing totalizer values.
    """
    try:
        with open(file_path, "r") as src:
            stored = json.load(src)
        if isinstance(stored, dict) and stored:
            return stored
        logger.info("Invalid data format in the file. Initializing totalizers.")
    except FileNotFoundError:
        logger.info("File not found. Initializing totalizers.")
    except json.JSONDecodeError:
        stamp = dt.now().strftime("%Y%m%d_%H%M%S")
        # Keep the corrupt file around (timestamped) for post-mortem.
        base, ext = os.path.splitext(file_path)
        backup_file_path = f"{base}_{stamp}{ext}"
        shutil.copyfile(file_path, backup_file_path)
        logger.error(f"Error decoding JSON. A backup of the file is created at {backup_file_path}. Initializing totalizers.")
    return initialize_totalizers()
# Helper that splits the telemetry payload into publishable chunks
def chunk_payload(payload, chunk_size=20):
    """Yield dicts carrying the payload's "ts" and at most chunk_size of
    its "values" entries per chunk, preserving insertion order."""
    pairs = list(payload["values"].items())
    for offset in range(0, len(pairs), chunk_size):
        batch = pairs[offset:offset + chunk_size]
        yield {"ts": payload["ts"], "values": dict(batch)}
def sendData(message,wizard_api):
    """Publish flowmeter telemetry and rolling period volumes.

    For each flowmeter measure with status == 1 the raw value is added to
    the payload; totalizer_1 additionally drives the day/week/month/year
    volume accumulators.  GPS position is appended best-effort.  The
    payload is chunked and published, then any period rollovers are
    reported in a second "reset" payload (previous-period volume plus a
    zeroed current volume).

    :param message: Group-upload message: {"values": {"flowmeter": {...}}}.
    :param wizard_api: NOTE(review): not used in this function — confirm
                       it is required by the QuickFaaS callback signature.
    """
    logger.debug(message)
    checkCredentialConfig()
    # Timestamp snapped to the nearest 10 minutes (in ms).
    payload = {"ts": (round(dt.timestamp(dt.now())/600)*600)*1000, "values": {}}
    resetPayload = {"ts": "", "values": {}}
    dayReset, weekReset, monthReset, yearReset = False, False, False, False
    for measure in message["values"]["flowmeter"].keys():
        try:
            # status == 1 marks a good read; others are skipped.
            if message["values"]["flowmeter"][measure]["status"] == 1:
                if measure in ["totalizer_1"]:
                    # totalizer_1 is the lifetime total the period volumes derive from.
                    payload["values"]["day_volume"], dayReset = totalizeDay(message["values"]["flowmeter"][measure]["raw_data"])
                    payload["values"]["week_volume"], weekReset = totalizeWeek(message["values"]["flowmeter"][measure]["raw_data"])
                    payload["values"]["month_volume"], monthReset = totalizeMonth(message["values"]["flowmeter"][measure]["raw_data"])
                    payload["values"]["year_volume"], yearReset = totalizeYear(message["values"]["flowmeter"][measure]["raw_data"])
                payload["values"][measure] = message["values"]["flowmeter"][measure]["raw_data"]
        except Exception as e:
            logger.error(e)
    try:
        payload["values"]["latitude"], payload["values"]["longitude"], payload["values"]["speed"] = getGPS()
    except:
        logger.error("Could not get GPS coordinates")
    for chunk in chunk_payload(payload=payload):
        publish(__topic__, json.dumps(chunk), __qos__)
        time.sleep(2)
    publish("v1/devices/me/attributes", json.dumps({"latestReportTime": (round(dt.timestamp(dt.now())/600)*600)*1000}), __qos__)
    # On a rollover, report the finished period's total and zero the counter.
    if dayReset:
        resetPayload["values"]["yesterday_volume"] = payload["values"]["day_volume"]
        resetPayload["values"]["day_volume"] = 0
    if weekReset:
        resetPayload["values"]["last_week_volume"] = payload["values"]["week_volume"]
        resetPayload["values"]["week_volume"] = 0
    if monthReset:
        resetPayload["values"]["last_month_volume"] = payload["values"]["month_volume"]
        resetPayload["values"]["month_volume"] = 0
    if yearReset:
        resetPayload["values"]["last_year_volume"] = payload["values"]["year_volume"]
        resetPayload["values"]["year_volume"] = 0
    if resetPayload["values"]:
        # +1 ms so the reset sample sorts after the main payload.
        resetPayload["ts"] = 1 + (round(dt.timestamp(dt.now())/600)*600)*1000
        publish(__topic__, json.dumps(resetPayload), __qos__)
def saveTotalizers(totalizers, file_path="/var/user/files/totalizers.json"):
    """
    Saves totalizer data to a JSON file.

    :param totalizers: Dictionary containing totalizer values to be saved.
    :param file_path: Path to the JSON file where totalizer data will be saved.
    :raises OSError, TypeError, ValueError: Re-raised after logging so the
        caller's retry loop can react.
    """
    try:
        with open(file_path, "w") as t:
            json.dump(totalizers, t)
    except (OSError, TypeError, ValueError) as e:
        # Fix: the original caught json.JSONEncodeError, which does not
        # exist — evaluating that except clause raised AttributeError and
        # masked the real I/O error. json.dump raises TypeError/ValueError
        # for unserializable data; IOError is an alias of OSError.
        logger.error(f"Error saving totalizers to {file_path}: {e}")
        raise  # Optionally re-raise the exception if it should be handled by the caller
def getGPS():
    """Read the current GPS fix and return (latitude, longitude, speed_mph)
    as formatted strings.

    Latitude/longitude arrive in degrees-minutes text (assumed format
    "DDd MM.MMMm H", e.g. "32d 51.2m N" — TODO confirm against the GPS
    library) and are converted to signed decimal degrees; speed is
    converted from knots to MPH.
    """
    # Create a gps instance
    gps = GPS()
    # Retrieve GPS information
    position_status = gps.get_position_status()
    logger.debug("position_status: ")
    logger.debug(position_status)
    latitude = position_status["latitude"].split(" ")
    longitude = position_status["longitude"].split(" ")
    # [:-1] strips the trailing unit character; minutes/60 gives decimal degrees.
    lat_dec = int(latitude[0][:-1]) + (float(latitude[1][:-1])/60)
    lon_dec = int(longitude[0][:-1]) + (float(longitude[1][:-1])/60)
    # South and West hemispheres are negative in decimal notation.
    if latitude[2] == "S":
        lat_dec = lat_dec * -1
    if longitude[2] == "W":
        lon_dec = lon_dec * -1
    #lat_dec = round(lat_dec, 7)
    #lon_dec = round(lon_dec, 7)
    logger.info("HERE IS THE GPS COORDS")
    logger.info(f"LATITUDE: {lat_dec}, LONGITUDE: {lon_dec}")
    speedKnots = position_status["speed"].split(" ")
    # 1 knot = 1.151 MPH.
    speedMPH = float(speedKnots[0]) * 1.151
    return (f"{lat_dec:.8f}",f"{lon_dec:.8f}",f"{speedMPH:.2f}")
def _totalize_period(lifetime, period_key, holding_key, needs_reset, new_marker,
                     max_retries, retry_delay):
    """Shared engine behind totalizeDay/Week/Month/Year (the four originals
    were copy-pasted duplicates).

    Computes the period volume as ``lifetime - totalizers[holding_key]``.
    When ``needs_reset(now, totalizers)`` says a new period started, the
    holding value is re-baselined to ``lifetime``, the period marker is set
    to ``new_marker(now)`` and the totalizers are persisted with retries.

    :return: (value, reset_occurred); (None, False) when every save fails.
    """
    totalizers = getTotalizers()
    # Timestamps snapped to the nearest 10 minutes, matching sendData().
    now = dt.fromtimestamp(round(dt.timestamp(dt.now()) / 600) * 600)
    value = lifetime - totalizers[holding_key]
    if not needs_reset(now, totalizers):
        return (value, False)
    totalizers[holding_key] = lifetime
    totalizers[period_key] = new_marker(now)
    for attempt in range(max_retries):
        try:
            saveTotalizers(totalizers)
            return (value, True)
        except Exception as e:
            logger.error(f"Attempt {attempt + 1} failed to save totalizers: {e}")
            if attempt < max_retries - 1:
                time.sleep(retry_delay)
            else:
                logger.error("All attempts to save totalizers failed.")
                return (None, False)
    # max_retries <= 0: nothing saved, but the computed value is still valid.
    return (value, False)
def totalizeDay(lifetime, max_retries=3, retry_delay=2):
    """
    Update and save daily totalizers based on the lifetime value.
    :param lifetime: The current lifetime total.
    :param max_retries: Maximum number of save attempts.
    :param retry_delay: Delay in seconds between retries.
    :return: A tuple containing the calculated value and a boolean indicating if a reset occurred, or (None, False) if save fails.
    """
    # Day marker is stored as an int day-of-month.
    return _totalize_period(
        lifetime, "day", "dayHolding",
        lambda now, t: int(now.strftime("%d")) != int(t["day"]),
        lambda now: int(now.strftime("%d")),
        max_retries, retry_delay)
def totalizeWeek(lifetime, max_retries=3, retry_delay=2):
    """
    Update and save weekly totalizers based on the lifetime value.
    :param lifetime: The current lifetime total.
    :param max_retries: Maximum number of save attempts.
    :param retry_delay: Delay in seconds between retries.
    :return: A tuple containing the calculated value and a boolean indicating if a reset occurred, or (None, False) if save fails.
    """
    # Weeks roll over on Sunday; the initial 0 marker always triggers a reset.
    return _totalize_period(
        lifetime, "week", "weekHolding",
        lambda now, t: (now.strftime("%U") != t["week"] and now.strftime("%a") == "Sun") or t["week"] == 0,
        lambda now: now.strftime("%U"),
        max_retries, retry_delay)
def totalizeMonth(lifetime, max_retries=3, retry_delay=2):
    """
    Update and save monthly totalizers based on the lifetime value.
    :param lifetime: The current lifetime total.
    :param max_retries: Maximum number of save attempts.
    :param retry_delay: Delay in seconds between retries.
    :return: A tuple containing the calculated value and a boolean indicating if a reset occurred, or (None, False) if save fails.
    """
    # Month marker is stored as the "%m" string but compared numerically.
    return _totalize_period(
        lifetime, "month", "monthHolding",
        lambda now, t: int(now.strftime("%m")) != int(t["month"]),
        lambda now: now.strftime("%m"),
        max_retries, retry_delay)
def totalizeYear(lifetime, max_retries=3, retry_delay=2):
    """
    Update and save yearly totalizers based on the lifetime value.
    :param lifetime: The current lifetime total.
    :param max_retries: Maximum number of save attempts.
    :param retry_delay: Delay in seconds between retries.
    :return: A tuple containing the calculated value and a boolean indicating if a reset occurred, or (None, False) if save fails.
    """
    # Year marker is stored as the "%Y" string but compared numerically.
    return _totalize_period(
        lifetime, "year", "yearHolding",
        lambda now, t: int(now.strftime("%Y")) != int(t["year"]),
        lambda now: now.strftime("%Y"),
        max_retries, retry_delay)

View File

@@ -1,21 +1,28 @@
# Enter your python code.
import json
import json, time
from datetime import datetime as dt
from common.Logger import logger
from quickfaas.remotebus import publish
def chunk_payload(payload, chunk_size=20):
for controller, data in payload.items():
for entry in data:
ts = entry['ts']
values = entry['values']
chunked_values = list(values.items())
for i in range(0, len(chunked_values), chunk_size):
yield {
"controller": controller,
"ts": ts,
"values": dict(chunked_values[i:i + chunk_size])
}
def chunk_payload(payload, chunk_size=20, is_attributes_payload=False):
    """Split an outgoing payload into pieces of at most `chunk_size` items.

    Attribute payloads ({controller: attrs}) are chunked by controller;
    telemetry payloads ({controller: [{"ts", "values"}]}) are chunked by
    individual values, with the controller name and timestamp repeated on
    every chunk.
    """
    if is_attributes_payload:
        entries = list(payload.items())
        for start in range(0, len(entries), chunk_size):
            yield dict(entries[start:start + chunk_size])
        return
    for ctrl_name, records in payload.items():
        for record in records:
            pairs = list(record['values'].items())
            for start in range(0, len(pairs), chunk_size):
                yield {
                    "controller": ctrl_name,
                    "ts": record['ts'],
                    "values": dict(pairs[start:start + chunk_size]),
                }
def sendData(message):
#logger.debug(message)
@@ -53,5 +60,11 @@ def sendData(message):
#logger.debug(payload)
publish(__topic__, json.dumps(payload), __qos__)
publish("v1/gateway/attributes", json.dumps(attributes_payload), __qos__)
for chunk in chunk_payload(payload=payload):
publish(__topic__, json.dumps(chunk), __qos__)
time.sleep(2)
for chunk in chunk_payload(payload=attributes_payload, is_attributes_payload=True):
publish("v1/gateway/attributes", json.dumps(attributes_payload), __qos__)
time.sleep(2)

View File

@@ -6,10 +6,9 @@ from quickfaas.remotebus import publish
from quickfaas.global_dict import get as get_params
from quickfaas.global_dict import _set_global_args
def reboot(reason="Rebooting for config file update"):
def reboot():
#basic = Basic()
logger.info("!" * 10 + "REBOOTING DEVICE" + "!"*10)
logger.info(reason)
r = os.popen("kill -s SIGHUP `cat /var/run/python/supervisord.pid`").read()
logger.info(f"REBOOT : {r}")
@@ -38,21 +37,21 @@ def convertJSONtoDS(j):
return d
def checkCredentialConfig():
logger.debug("CHECKING CONFIG")
logger.info("CHECKING CONFIG")
cfgpath = "/var/user/cfg/device_supervisor/device_supervisor.cfg"
credspath = "/var/user/files/creds.json"
cfg = dict()
with open(cfgpath, "r") as f:
cfg = json.load(f)
clouds = cfg.get("clouds")
logger.debug(clouds)
logger.info(clouds)
#if not configured then try to configure from stored values
if clouds[0]["args"]["clientId"] == "unknown" or clouds[0]["args"]["username"] == "unknown" or not clouds[0]["args"]["passwd"] or clouds[0]["args"]["passwd"] == "unknown":
checkFileExist("creds.json")
with open(credspath, "r") as c:
creds = json.load(c)
if creds:
logger.debug("updating config with stored data")
logger.info("updating config with stored data")
clouds[0]["args"]["clientId"] = creds["clientId"]
clouds[0]["args"]["username"] = creds["userName"]
clouds[0]["args"]["passwd"] = creds["password"]
@@ -65,7 +64,7 @@ def checkCredentialConfig():
#assuming clouds is filled out, if data is different then assume someone typed in something new and store it, if creds is empty fill with clouds' data
checkFileExist("creds.json")
with open(credspath, "r") as c:
logger.debug("updating stored file with new data")
logger.info("updating stored file with new data")
cfg = checkParameterConfig(cfg)
with open(cfgpath, "w", encoding='utf-8') as n:
json.dump(cfg, n, indent=1, ensure_ascii=False)
@@ -85,20 +84,20 @@ def checkCredentialConfig():
json.dump(creds,cw)
def checkParameterConfig(cfg):
logger.debug("Checking Parameters!!!!")
logger.info("Checking Parameters!!!!")
paramspath = "/var/user/files/params.json"
cfgparams = convertDStoJSON(cfg.get("labels"))
#check stored values
checkFileExist("params.json")
with open(paramspath, "r") as f:
logger.debug("Opened param storage file")
logger.info("Opened param storage file")
params = json.load(f)
if params:
if cfgparams != params:
#go through each param
#if not "unknown" and cfg and params aren't the same take from cfg likely updated manually
#if key in cfg but not in params copy to params
logger.debug("equalizing params between cfg and stored")
logger.info("equalizing params between cfg and stored")
for key in cfgparams.keys():
try:
if cfgparams[key] != params[key] and cfgparams[key] != "unknown":
@@ -111,7 +110,7 @@ def checkParameterConfig(cfg):
json.dump(params, p)
else:
with open(paramspath, "w") as p:
logger.debug("initializing param file with params in memory")
logger.info("initializing param file with params in memory")
json.dump(convertDStoJSON(get_params()), p)
cfg["labels"] = get_params()
@@ -133,26 +132,14 @@ def sendData(message):
except Exception as e:
logger.error(e)
payload = {"ts": (round(dt.timestamp(dt.now())/600)*600)*1000, "values": {}}
run_tanks = {}
for measure in message["measures"]:
try:
logger.debug(measure)
if abs(payload["ts"]/1000 - measure["timestamp"]) > 3600:
reboot(reason="Poll timestamp and actual timestamp out of sync. Actual: {} Poll: {}".format(payload["ts"]/1000,measure["timestamp"]))
if measure["name"] in ["oil_run_tank","water_run_tank"]:
run_tanks[measure["name"]] = measure["value"]
if "_level" in measure["name"]:
run_tanks[measure["name"]] = measure["value"]
payload["values"][measure["name"]] = measure["value"]
except Exception as e:
logger.error(e)
payload["values"]["oil_run_tank_level"] = run_tanks["oil_0" + str(run_tanks["oil_run_tank"]) + "_level"]
payload["values"]["water_run_tank_level"] = run_tanks["water_0" + str(run_tanks["water_run_tank"]) + "_level"]
for chunk in chunk_payload(payload=payload):
publish(__topic__, json.dumps(chunk), __qos__)
time.sleep(2)
publish("v1/devices/me/attributes", json.dumps({"latestReportTime": (round(dt.timestamp(dt.now())/600)*600)*1000}), __qos__)

View File

@@ -1,5 +1,5 @@
# Enter your python code.
import json, os
import json, os, time
from datetime import datetime as dt
from common.Logger import logger
from quickfaas.remotebus import publish
@@ -117,6 +117,15 @@ def checkParameterConfig(cfg):
return cfg
# Helper function to split the payload into chunks
def chunk_payload(payload, chunk_size=20):
    """Yield copies of `payload` holding at most `chunk_size` values each.

    Every chunk keeps the original "ts" so the platform files all values
    under the same timestamp.
    """
    pairs = list(payload["values"].items())
    for start in range(0, len(pairs), chunk_size):
        yield {"ts": payload["ts"], "values": dict(pairs[start:start + chunk_size])}
def sendData(message):
#logger.debug(message)
try:
@@ -140,6 +149,8 @@ def sendData(message):
logger.error(e)
payload["values"]["oil_run_tank_level"] = run_tanks["oil_0" + str(run_tanks["oil_run_tank"]) + "_level"]
payload["values"]["water_run_tank_level"] = run_tanks["water_0" + str(run_tanks["water_run_tank"]) + "_level"]
publish(__topic__, json.dumps(payload), __qos__)
for chunk in chunk_payload(payload=payload):
publish(__topic__, json.dumps(chunk), __qos__)
time.sleep(2)
publish("v1/devices/me/attributes", json.dumps({"latestReportTime": (round(dt.timestamp(dt.now())/600)*600)*1000}), __qos__)

View File

@@ -1,11 +1,21 @@
import json, time
from datetime import datetime as dt
from quickfaas.measure import recall, write
from quickfaas.remotebus import publish
from common.Logger import logger
# Helper function to split the payload into chunks
def chunk_payload(payload, chunk_size=20):
    """Yield copies of `payload` holding at most `chunk_size` values each.

    Every chunk keeps the original "ts" so the platform files all values
    under the same timestamp.
    """
    pairs = list(payload["values"].items())
    for start in range(0, len(pairs), chunk_size):
        yield {"ts": payload["ts"], "values": dict(pairs[start:start + chunk_size])}
def sync():
#get new values and send
payload = {}
payload = {"ts": round(dt.timestamp(dt.now()))*1000, "values": {}}
topic = "v1/devices/me/telemetry"
try:
data = recall()#json.loads(recall().decode("utf-8"))
@@ -15,10 +25,14 @@ def sync():
for controller in data:
for measure in controller["measures"]:
#publish measure
payload[measure["name"]] = measure["value"]
payload["values"][measure["name"]] = measure["value"]
logger.debug("Sending on topic: {}".format(topic))
logger.debug("Sending value: {}".format(payload))
publish(topic, json.dumps(payload), 1)
for chunk in chunk_payload(payload=payload):
publish(topic, json.dumps(chunk), 1)
time.sleep(2)
def writeplctag(value):
#value in the form {"measurement": <measurement_name>, "value": <value to write>}
try:

View File

@@ -1,5 +1,5 @@
# Enter your python code.
import json, os
import json, os, time
from datetime import datetime as dt
from common.Logger import logger
from quickfaas.remotebus import publish
@@ -117,6 +117,15 @@ def checkParameterConfig(cfg):
return cfg
# Helper function to split the payload into chunks
def chunk_payload(payload, chunk_size=20):
    """Yield copies of `payload` holding at most `chunk_size` values each.

    Every chunk keeps the original "ts" so the platform files all values
    under the same timestamp.
    """
    pairs = list(payload["values"].items())
    for start in range(0, len(pairs), chunk_size):
        yield {"ts": payload["ts"], "values": dict(pairs[start:start + chunk_size])}
def sendData(message):
#logger.debug(message)
try:
@@ -140,6 +149,10 @@ def sendData(message):
logger.error(e)
payload["values"]["oil_run_tank_level"] = run_tanks["oil_0" + str(run_tanks["oil_run_tank"]) + "_level"]
payload["values"]["water_run_tank_level"] = run_tanks["water_0" + str(run_tanks["water_run_tank"]) + "_level"]
publish(__topic__, json.dumps(payload), __qos__)
for chunk in chunk_payload(payload=payload):
publish(__topic__, json.dumps(chunk), __qos__)
time.sleep(2)
publish("v1/devices/me/attributes", json.dumps({"latestReportTime": (round(dt.timestamp(dt.now())/600)*600)*1000}), __qos__)

View File

@@ -1,11 +1,21 @@
import json, time
from datetime import datetime as dt
from quickfaas.measure import recall, write
from quickfaas.remotebus import publish
from common.Logger import logger
# Helper function to split the payload into chunks
def chunk_payload(payload, chunk_size=20):
    """Yield copies of `payload` holding at most `chunk_size` values each.

    Every chunk keeps the original "ts" so the platform files all values
    under the same timestamp.
    """
    pairs = list(payload["values"].items())
    for start in range(0, len(pairs), chunk_size):
        yield {"ts": payload["ts"], "values": dict(pairs[start:start + chunk_size])}
def sync():
#get new values and send
payload = {}
payload = {"ts": round(dt.timestamp(dt.now()))*1000, "values": {}}
topic = "v1/devices/me/telemetry"
try:
data = recall()#json.loads(recall().decode("utf-8"))
@@ -15,10 +25,13 @@ def sync():
for controller in data:
for measure in controller["measures"]:
#publish measure
payload[measure["name"]] = measure["value"]
payload["values"][measure["name"]] = measure["value"]
logger.debug("Sending on topic: {}".format(topic))
logger.debug("Sending value: {}".format(payload))
publish(topic, json.dumps(payload), 1)
for chunk in chunk_payload(payload=payload):
publish(topic, json.dumps(chunk), 1)
time.sleep(2)
def writeplctag(value):
#value in the form {"measurement": <measurement_name>, "value": <value to write>}
try:

View File

@@ -1,4 +1,4 @@
import json, os
import json, os, time
from datetime import datetime as dt
from datetime import timedelta as td
from common.Logger import logger
@@ -117,8 +117,16 @@ def checkParameterConfig(cfg):
return cfg
# Helper function to split the payload into chunks
def chunk_payload(payload, chunk_size=20):
    """Yield copies of `payload` holding at most `chunk_size` values each.

    Every chunk keeps the original "ts" so the platform files all values
    under the same timestamp.
    """
    pairs = list(payload["values"].items())
    for start in range(0, len(pairs), chunk_size):
        yield {"ts": payload["ts"], "values": dict(pairs[start:start + chunk_size])}
def sendData(message, wizard_api):
def sendData(message):
payload = {"ts": (round(dt.timestamp(dt.now())/600)*600)*1000, "values": {}}
resetPayload = {"ts": "", "values": {}}
try:
@@ -136,7 +144,9 @@ def sendData(message, wizard_api):
payload["values"][measure] = message["values"]["hrvalvecontroller"][measure]["raw_data"]
except Exception as e:
logger.error(e)
publish(__topic__, json.dumps(payload), __qos__, cloud_name="default")
for chunk in chunk_payload(payload=payload):
publish(__topic__, json.dumps(chunk), __qos__)
time.sleep(2)
publish("v1/devices/me/attributes", json.dumps({"latestReportTime": (round(dt.timestamp(dt.now())/600)*600)*1000}), __qos__, cloud_name="default")
if dayReset:

View File

@@ -1,4 +1,4 @@
import json, os
import json, os, time
from common.Logger import logger
from quickfaas.remotebus import publish
from quickfaas.global_dict import get as get_params
@@ -143,6 +143,15 @@ def checkParameterConfig(cfg):
os.system(f'rm {paramspath}')
return cfg
# Helper function to split the payload into chunks
def chunk_payload(payload, chunk_size=20):
    """Yield copies of `payload` holding at most `chunk_size` values each.

    Every chunk keeps the original "ts" so the platform files all values
    under the same timestamp.
    """
    pairs = list(payload["values"].items())
    for start in range(0, len(pairs), chunk_size):
        yield {"ts": payload["ts"], "values": dict(pairs[start:start + chunk_size])}
def sendData(message):
payload = {}
payload["ts"] = (round(dt.timestamp(dt.now())/600)*600)*1000
@@ -157,5 +166,8 @@ def sendData(message):
payload["values"][measure["name"]] = measure["value"]
except Exception as e:
logger.error(e)
publish(__topic__, json.dumps(payload), __qos__)
for chunk in chunk_payload(payload=payload):
publish(__topic__, json.dumps(chunk), __qos__)
time.sleep(2)
publish("v1/devices/me/attributes", json.dumps({"latestReportTime": (round(dt.timestamp(dt.now())/600)*600)*1000}), __qos__)

View File

@@ -1,11 +1,21 @@
import json, time
from datetime import datetime as dt
from quickfaas.measure import recall, write
from quickfaas.remotebus import publish
from common.Logger import logger
# Helper function to split the payload into chunks
def chunk_payload(payload, chunk_size=20):
    """Yield copies of `payload` holding at most `chunk_size` values each.

    Every chunk keeps the original "ts" so the platform files all values
    under the same timestamp.
    """
    pairs = list(payload["values"].items())
    for start in range(0, len(pairs), chunk_size):
        yield {"ts": payload["ts"], "values": dict(pairs[start:start + chunk_size])}
def sync():
#get new values and send
payload = {}
payload = {"ts": round(dt.timestamp(dt.now()))*1000, "values": {}}
topic = "v1/devices/me/telemetry"
try:
data = recall()#json.loads(recall().decode("utf-8"))
@@ -15,10 +25,12 @@ def sync():
for controller in data:
for measure in controller["measures"]:
#publish measure
payload[measure["name"]] = measure["value"]
payload["values"][measure["name"]] = measure["value"]
logger.debug("Sending on topic: {}".format(topic))
logger.debug("Sending value: {}".format(payload))
publish(topic, json.dumps(payload), 1)
for chunk in chunk_payload(payload=payload):
publish(topic, json.dumps(chunk), 1)
time.sleep(2)
def writeplctag(value):
#value in the form {"measurement": <measurement_name>, "value": <value to write>}
try:

View File

@@ -1,35 +0,0 @@
import json
from common.Logger import logger
from quickfaas.global_dict import get as get_params
from quickfaas.global_dict import _set_global_args
#v1/devices/me/attributes/response/+
def receiveAttributeResponse(topic, payload):
    """Log the response to a full-attributes request (v1/devices/me/attributes/response/+)."""
    data = json.loads(payload)
    logger.info(topic)
    logger.info(data)
    # Log the sample "test" shared attribute; raises KeyError if "shared" is
    # absent, exactly as before.
    logger.info(data["shared"].get("test"))
def updateConfig():
    """Load the device-supervisor config and return its labels as a dict.

    :return: the parsed {key: value} labels mapping, or None when the config
        has no "labels" section.

    NOTE(review): the original read the file, converted the labels and then
    discarded the result; returning the mapping makes the function usable
    while staying backward compatible (callers that ignored the previous
    None return are unaffected).
    """
    # Open read-only: nothing is written back, so the original "r+" mode was
    # unnecessary.
    with open("/var/user/cfg/device_supervisor/device_supervisor.cfg", "r") as f:
        cfg = json.load(f)
    labels = cfg.get("labels")
    if labels:
        return convertDStoJSON(labels)
    return None
def convertDStoJSON(ds):
    """Flatten a device-supervisor [{"key": k, "value": v}, ...] list into a dict."""
    return {entry["key"]: entry["value"] for entry in ds}
def convertJSONtoDS(j):
    """Expand a dict into the device-supervisor [{"key": k, "value": v}, ...] list form."""
    return [{"key": name, "value": j[name]} for name in j.keys()]

View File

@@ -1,9 +0,0 @@
import json
from common.Logger import logger
#v1/devices/me/attributes
def receiveAttributeUpdate(topic, payload):
    """Log a server-side attribute update (v1/devices/me/attributes).

    :param topic: MQTT topic the update arrived on.
    :param payload: JSON-encoded attribute update from the platform.
    """
    logger.info(topic)
    # Parse once instead of twice: the original called json.loads on the same
    # payload for each log line.
    data = json.loads(payload)
    logger.info(data)
    logger.info(data["shared"].get("test"))

View File

@@ -0,0 +1,12 @@
# Enter your python code.
import json, time
from common.Logger import logger
from quickfaas.remotebus import publish
def sendAlarm(message):
    """Publish a single alarm measurement as a timestamped telemetry message."""
    logger.info(message)
    alarm = {
        "ts": time.time()*1000,
        "values": {message["measureName"]: message["value"]},
    }
    publish(__topic__, json.dumps(alarm), __qos__)

View File

@@ -0,0 +1,179 @@
# Enter your python code.
import json, os, time
from datetime import datetime as dt
from common.Logger import logger
from mobiuspi_lib.gps import GPS
from quickfaas.remotebus import publish
from quickfaas.global_dict import get as get_params
from quickfaas.global_dict import _set_global_args
def reboot():
    """Restart the device-supervisor stack by SIGHUP-ing supervisord.

    Called after rewriting device_supervisor.cfg so the new configuration is
    picked up on restart.
    """
    #basic = Basic()
    logger.info("!" * 10 + "REBOOTING DEVICE" + "!"*10)
    # SIGHUP makes supervisord reload and restart its managed processes; the
    # command's stdout is captured for the log.
    r = os.popen("kill -s SIGHUP `cat /var/run/python/supervisord.pid`").read()
    logger.info(f"REBOOT : {r}")
def checkFileExist(filename):
    """Make sure /var/user/files exists and contains `filename`, seeded with {}."""
    folder = "/var/user/files"
    target = folder + "/" + filename
    if not os.path.exists(folder):
        logger.info("no folder making files folder in var/user")
        os.makedirs(folder)
        with open(target, "a") as handle:
            json.dump({}, handle)
    if not os.path.exists(target):
        logger.info("no creds file making creds file")
        with open(target, "a") as handle:
            json.dump({}, handle)
def convertDStoJSON(ds):
    """Flatten a device-supervisor [{"key": k, "value": v}, ...] list into a dict."""
    return {entry["key"]: entry["value"] for entry in ds}
def convertJSONtoDS(j):
    """Expand a dict into the device-supervisor [{"key": k, "value": v}, ...] list form."""
    return [{"key": name, "value": j[name]} for name in j.keys()]
def checkCredentialConfig():
    """Synchronize MQTT cloud credentials between the supervisor config and creds.json.

    If the running config still holds placeholder ("unknown"/empty) credentials,
    restore them from /var/user/files/creds.json, rewrite the config and reboot
    the supervisor so it reconnects with the restored values. Otherwise the
    config is treated as authoritative and its credentials are mirrored into
    creds.json.

    NOTE(review): only the first entry of cfg["clouds"] is consulted — assumes a
    single configured cloud endpoint; confirm for multi-cloud setups.
    """
    logger.info("CHECKING CONFIG")
    cfgpath = "/var/user/cfg/device_supervisor/device_supervisor.cfg"
    credspath = "/var/user/files/creds.json"
    cfg = dict()
    with open(cfgpath, "r") as f:
        cfg = json.load(f)
    clouds = cfg.get("clouds")
    logger.info(clouds)
    #if not configured then try to configure from stored values
    if clouds[0]["args"]["clientId"] == "unknown" or clouds[0]["args"]["username"] == "unknown" or not clouds[0]["args"]["passwd"] or clouds[0]["args"]["passwd"] == "unknown":
        checkFileExist("creds.json")
        with open(credspath, "r") as c:
            creds = json.load(c)
            if creds:
                logger.info("updating config with stored data")
                clouds[0]["args"]["clientId"] = creds["clientId"]
                clouds[0]["args"]["username"] = creds["userName"]
                clouds[0]["args"]["passwd"] = creds["password"]
                cfg["clouds"] = clouds
                # Also reconcile stored parameters before persisting the config.
                cfg = checkParameterConfig(cfg)
                with open(cfgpath, "w", encoding='utf-8') as n:
                    json.dump(cfg, n, indent=1, ensure_ascii=False)
                # Restart the supervisor so the restored credentials take effect.
                reboot()
    else:
        #assuming clouds is filled out, if data is different then assume someone typed in something new and store it, if creds is empty fill with clouds' data
        checkFileExist("creds.json")
        with open(credspath, "r") as c:
            logger.info("updating stored file with new data")
            cfg = checkParameterConfig(cfg)
            with open(cfgpath, "w", encoding='utf-8') as n:
                json.dump(cfg, n, indent=1, ensure_ascii=False)
            creds = json.load(c)
            if creds:
                # Mirror any manually-edited config credentials into the store.
                if creds["clientId"] != clouds[0]["args"]["clientId"]:
                    creds["clientId"] = clouds[0]["args"]["clientId"]
                if creds["userName"] != clouds[0]["args"]["username"]:
                    creds["userName"] = clouds[0]["args"]["username"]
                if creds["password"] != clouds[0]["args"]["passwd"]:
                    creds["password"] = clouds[0]["args"]["passwd"]
            else:
                # First run: seed the credential store from the config.
                creds["clientId"] = clouds[0]["args"]["clientId"]
                creds["userName"] = clouds[0]["args"]["username"]
                creds["password"] = clouds[0]["args"]["passwd"]
            with open(credspath, "w") as cw:
                json.dump(creds,cw)
def checkParameterConfig(cfg):
    """Reconcile gateway parameters ("labels") between cfg and params.json.

    Stored values win unless the config holds a newer non-"unknown" value; the
    merged set is written back to both the params file and cfg["labels"], and
    pushed into the wizard's global args. An empty params file is initialized
    from the parameters currently in memory.

    :param cfg: parsed device_supervisor.cfg dict (mutated in place).
    :return: cfg with its "labels" entry reconciled.
    """
    logger.info("Checking Parameters!!!!")
    paramspath = "/var/user/files/params.json"
    cfgparams = convertDStoJSON(cfg.get("labels"))
    #check stored values
    checkFileExist("params.json")
    with open(paramspath, "r") as f:
        logger.info("Opened param storage file")
        params = json.load(f)
        if params:
            if cfgparams != params:
                #go through each param
                #if not "unknown" and cfg and params aren't the same take from cfg likely updated manually
                #if key in cfg but not in params copy to params
                logger.info("equalizing params between cfg and stored")
                for key in cfgparams.keys():
                    try:
                        if cfgparams[key] != params[key] and cfgparams[key] != "unknown":
                            params[key] = cfgparams[key]
                    except:
                        # Key missing from the stored file: copy it over.
                        params[key] = cfgparams[key]
                cfg["labels"] = convertJSONtoDS(params)
                _set_global_args(convertJSONtoDS(params))
                with open(paramspath, "w") as p:
                    json.dump(params, p)
        else:
            with open(paramspath, "w") as p:
                logger.info("initializing param file with params in memory")
                json.dump(convertDStoJSON(get_params()), p)
            cfg["labels"] = get_params()
    return cfg
# Helper function to split the payload into chunks
def chunk_payload(payload, chunk_size=20):
    """Yield copies of `payload` holding at most `chunk_size` values each.

    Every chunk keeps the original "ts" so the platform files all values
    under the same timestamp.
    """
    pairs = list(payload["values"].items())
    for start in range(0, len(pairs), chunk_size):
        yield {"ts": payload["ts"], "values": dict(pairs[start:start + chunk_size])}
def getGPS():
    """Read the cell modem's GPS fix and publish position and speed.

    Publishes one telemetry message (timestamp snapped to the nearest 10
    minutes) and mirrors the same fields as device attributes.
    """
    # Create a gps instance
    gps = GPS()
    # Retrieve GPS information
    position_status = gps.get_position_status()
    logger.debug("position_status: ")
    logger.debug(position_status)
    # assumes coordinates come as "DDx MM.Mx H" (degrees, decimal minutes,
    # hemisphere) separated by spaces, each numeric token with a trailing
    # symbol character — TODO confirm against mobiuspi_lib.gps output
    latitude = position_status["latitude"].split(" ")
    longitude = position_status["longitude"].split(" ")
    # Degrees + decimal-minutes -> decimal degrees (trailing symbol stripped).
    lat_dec = int(latitude[0][:-1]) + (float(latitude[1][:-1])/60)
    lon_dec = int(longitude[0][:-1]) + (float(longitude[1][:-1])/60)
    # Southern and western hemispheres are negative.
    if latitude[2] == "S":
        lat_dec = lat_dec * -1
    if longitude[2] == "W":
        lon_dec = lon_dec * -1
    #lat_dec = round(lat_dec, 7)
    #lon_dec = round(lon_dec, 7)
    logger.info("HERE IS THE GPS COORDS")
    logger.info(f"LATITUDE: {lat_dec}, LONGITUDE: {lon_dec}")
    # assumes speed is reported in knots; 1 knot ~= 1.151 mph — TODO confirm
    speedKnots = position_status["speed"].split(" ")
    speedMPH = float(speedKnots[0]) * 1.151
    publish(__topic__, json.dumps({"ts": (round(dt.timestamp(dt.now())/600)*600)*1000, "values":{"latitude":f"{lat_dec:.8f}", "longitude":f"{lon_dec:.8f}", "speed": f"{speedMPH:.2f}"}}), __qos__)
    publish("v1/devices/me/attributes", json.dumps({"latitude":f"{lat_dec:.8f}", "longitude":f"{lon_dec:.8f}", "speed": f"{speedMPH:.2f}"}), __qos__)
def sendData(message):
    """Publish every measure in `message` as chunked telemetry, then report the send time.

    Timestamps are snapped to the nearest 10-minute boundary (ms since epoch).
    The credential check and GPS publish are best-effort: their failures are
    logged and the telemetry send continues.
    """
    telemetry = {
        "ts": (round(dt.timestamp(dt.now())/600)*600)*1000,
        "values": {},
    }
    try:
        checkCredentialConfig()
    except Exception as e:
        logger.error(e)
    for reading in message["measures"]:
        try:
            logger.debug(reading)
            telemetry["values"][reading["name"]] = reading["value"]
        except Exception as e:
            logger.error(e)
    try:
        getGPS()
    except Exception as e:
        logger.error(e)
    # At most 20 values per MQTT message, paced 2 s apart.
    for chunk in chunk_payload(payload=telemetry):
        publish(__topic__, json.dumps(chunk), __qos__)
        time.sleep(2)
    publish("v1/devices/me/attributes", json.dumps({"latestReportTime": (round(dt.timestamp(dt.now())/600)*600)*1000}), __qos__)

View File

@@ -0,0 +1,35 @@
# Enter your python code.
from common.Logger import logger
import json, time
from quickfaas.measure import recall, write
from quickfaas.remotebus import publish
def writeplctag(value):
    """Write one value to a PLC tag on the "tankalarms" controller.

    :param value: {"measurement": <measurement_name>, "value": <value to write>}
    :return: True when the write call succeeded, False otherwise.
    """
    try:
        #value = json.loads(value.replace("'",'"'))
        logger.debug(value)
        #payload format: [{"name": "advvfdipp", "measures": [{"name": "manualfrequencysetpoint", "value": 49}]}]
        message = [{"name": "tankalarms", "measures":[{"name":value["measurement"], "value": value["value"]}]}]
        resp = write(message)
        logger.debug("RETURN FROM WRITE: {}".format(resp))
        return True
    except Exception as e:
        # Failures were previously logged at debug level and easy to miss in
        # production logs; surface them as errors.
        logger.error(e)
        return False
def receiveAttributes(topic, payload):
    """Handle a shared-attribute update by writing each value to its PLC tag.

    :param topic: MQTT topic the update arrived on.
    :param payload: JSON object such as {'tankalarms.water_hihi_spt': 12};
        keys may carry a "tankalarms." controller prefix.
    """
    logger.debug(topic)
    # Parse once (the original decoded the payload twice).
    payload = json.loads(payload)
    logger.debug(payload)
    measures = list(payload.keys())
    logger.debug(measures)
    # Process every attribute in the update, not just the first key.
    for key in measures:
        if "tankalarms." in key:
            measure = key.split(".")[1]
        else:
            measure = key
        logger.debug(measure)
        writeplctag({"measurement": measure, "value": payload[key]})

View File

@@ -31,20 +31,22 @@ def sync():
for chunk in chunk_payload(payload=payload):
publish(topic, json.dumps(chunk), 1)
time.sleep(2)
def writeplctag(value, wizard_api):
def writeplctag(value):
#value in the form {"measurement": <measurement_name>, "value": <value to write>}
try:
#value = json.loads(value.replace("'",'"'))
logger.debug(value)
message = {"plcpond":{value["measurement"]: value["value"]}}
resp = wizard_api.write_plc_values(message)
#logger.debug("RETURN FROM WRITE: {}".format(resp))
#payload format: [{"name": "tankalarms", "measures": [{"name": "manualfrequencysetpoint", "value": 49}]}]
message = [{"name": "tankalarms", "measures":[{"name":value["measurement"], "value": value["value"]}]}]
resp = write(message)
logger.debug("RETURN FROM WRITE: {}".format(resp))
return True
except Exception as e:
logger.debug(e)
return False
def receiveCommand(topic, payload, wizard_api):
def receiveCommand(topic, payload):
try:
logger.debug(topic)
logger.debug(json.loads(payload))
@@ -55,19 +57,24 @@ def receiveCommand(topic, payload, wizard_api):
"writeplctag": writeplctag,
}
if command == "setPLCTag":
result = commands["writeplctag"](p["params"],wizard_api)
if result:
sync()
#commands[command](p["mac"].lower(),p["payload"]["value"], wizard_api)
try:
result = commands["writeplctag"](p["params"])
logger.debug(result)
except Exception as e:
logger.error(e)
#logger.debug(command)
ack(topic.split("/")[-1], wizard_api)
ack(topic.split("/")[-1])
time.sleep(5)
sync()
except Exception as e:
logger.debug(e)
def ack(msgid, wizard_api):
def ack(msgid):
#logger.debug(msgid)
#logger.debug(mac)
#logger.debug(name)
#logger.debug(value)
wizard_api.mqtt_publish("v1/devices/me/rpc/response/" + str(msgid), json.dumps({"msg": {"time": time.time()}, "metadata": "", "msgType": ""}))
publish("v1/devices/me/rpc/response/" + str(msgid), json.dumps({"msg": {"time": time.time()}, "metadata": "", "msgType": ""}), 1)

View File

@@ -646,14 +646,6 @@
{
"key": "MAC",
"value": "00:18:05:22:9c:ec"
},
{
"key": "test",
"value": "wooopwooop"
},
{
"key": "itME",
"value": "AAAHHHHH"
}
],
"modbusSlave": {

View File

@@ -1,5 +1,5 @@
# Enter your python code.
import json, os
import json, os, time
from datetime import datetime as dt
from common.Logger import logger
from quickfaas.remotebus import publish
@@ -116,6 +116,16 @@ def checkParameterConfig(cfg):
return cfg
# Helper function to split the payload into chunks
def chunk_payload(payload, chunk_size=20):
    """Yield copies of `payload` holding at most `chunk_size` values each.

    Every chunk keeps the original "ts" so the platform files all values
    under the same timestamp.
    """
    pairs = list(payload["values"].items())
    for start in range(0, len(pairs), chunk_size):
        yield {"ts": payload["ts"], "values": dict(pairs[start:start + chunk_size])}
def sendData(message):
logger.debug(message)
try:
@@ -144,7 +154,11 @@ def sendData(message):
payload["values"]["valve_status"] = 0
else:
payload["values"]["valve_status"] = -1
publish(__topic__, json.dumps(payload), __qos__)
for chunk in chunk_payload(payload=payload):
publish(__topic__, json.dumps(chunk), __qos__)
time.sleep(2)
publish("v1/devices/me/attributes", json.dumps({"latestReportTime": (round(dt.timestamp(dt.now())/600)*600)*1000}), __qos__)
except Exception as e:
logger.error(e)

View File

@@ -1,19 +1,28 @@
import json, time
from datetime import datetime as dt
from quickfaas.measure import recall, write
from quickfaas.remotebus import publish
from common.Logger import logger
# Helper function to split the payload into chunks
def chunk_payload(payload, chunk_size=20):
    """Yield copies of `payload` holding at most `chunk_size` values each.

    Every chunk keeps the original "ts" so the platform files all values
    under the same timestamp.
    """
    pairs = list(payload["values"].items())
    for start in range(0, len(pairs), chunk_size):
        yield {"ts": payload["ts"], "values": dict(pairs[start:start + chunk_size])}
def sync():
#get new values and send
payload = {}
payload = {"ts": round(dt.timestamp(dt.now()))*1000, "values": {}}
topic = "v1/devices/me/telemetry"
try:
data = recall()#json.loads(recall().decode("utf-8"))
except Exception as e:
logger.error(e)
logger.info(data)
logger.debug(data)
for controller in data:
payload = {"ts": int(time.time()*1000), "values": {}}
valve_open = 0
valve_close = 0
for measure in controller["measures"]:
@@ -36,7 +45,10 @@ def sync():
payload["values"]["valve_status"] = "Unknown"
logger.debug("Sending on topic: {}".format(topic))
logger.debug("Sending value: {}".format(payload))
publish(topic, json.dumps(payload))
for chunk in chunk_payload(payload=payload):
publish(topic, json.dumps(chunk), 1)
time.sleep(2)
def writeplctag(value):
try:
#value = json.loads(value.replace("'",'"'))