updated various devicetypes
This commit is contained in:
5
.gitignore
vendored
5
.gitignore
vendored
@@ -1,2 +1,7 @@
|
|||||||
|
|
||||||
.DS_Store
|
.DS_Store
|
||||||
|
.DS_Store
|
||||||
|
Pub_Sub/.DS_Store
|
||||||
|
Pub_Sub/abbflow/.DS_Store
|
||||||
|
Pub_Sub/abbflow/v2/.DS_Store
|
||||||
|
Pub_Sub/advvfdipp/.DS_Store
|
||||||
|
|||||||
BIN
Pub_Sub/.DS_Store
vendored
BIN
Pub_Sub/.DS_Store
vendored
Binary file not shown.
BIN
Pub_Sub/abbflow/.DS_Store
vendored
BIN
Pub_Sub/abbflow/.DS_Store
vendored
Binary file not shown.
BIN
Pub_Sub/abbflow/v2/.DS_Store
vendored
BIN
Pub_Sub/abbflow/v2/.DS_Store
vendored
Binary file not shown.
BIN
Pub_Sub/advvfdipp/.DS_Store
vendored
BIN
Pub_Sub/advvfdipp/.DS_Store
vendored
Binary file not shown.
@@ -19,7 +19,7 @@
|
|||||||
"group": "default",
|
"group": "default",
|
||||||
"uploadType": "periodic",
|
"uploadType": "periodic",
|
||||||
"dataType": "FLOAT",
|
"dataType": "FLOAT",
|
||||||
"addr": "val_Flowmeter",
|
"addr": "val_Flowmeter_BarrelsPerDay",
|
||||||
"decimal": 2,
|
"decimal": 2,
|
||||||
"readWrite": "ro",
|
"readWrite": "ro",
|
||||||
"unit": "",
|
"unit": "",
|
||||||
@@ -351,7 +351,7 @@
|
|||||||
"ctrlName": "advvfdipp",
|
"ctrlName": "advvfdipp",
|
||||||
"group": "default",
|
"group": "default",
|
||||||
"uploadType": "periodic",
|
"uploadType": "periodic",
|
||||||
"dataType": "INT",
|
"dataType": "DINT",
|
||||||
"addr": "sts_PID_Control",
|
"addr": "sts_PID_Control",
|
||||||
"readWrite": "rw",
|
"readWrite": "rw",
|
||||||
"unit": "",
|
"unit": "",
|
||||||
@@ -489,7 +489,7 @@
|
|||||||
"args": {
|
"args": {
|
||||||
"host": "hp.henrypump.cloud",
|
"host": "hp.henrypump.cloud",
|
||||||
"port": 1883,
|
"port": 1883,
|
||||||
"clientId": "faskens-ci-1301",
|
"clientId": "unknown",
|
||||||
"auth": 1,
|
"auth": 1,
|
||||||
"tls": 0,
|
"tls": 0,
|
||||||
"cleanSession": 1,
|
"cleanSession": 1,
|
||||||
@@ -500,8 +500,8 @@
|
|||||||
"rootCA": "",
|
"rootCA": "",
|
||||||
"verifyServer": 0,
|
"verifyServer": 0,
|
||||||
"verifyClient": 0,
|
"verifyClient": 0,
|
||||||
"username": "faskensmqtt",
|
"username": "unknown",
|
||||||
"passwd": "faskensmqtt@1903",
|
"passwd": "unknown",
|
||||||
"authType": 1,
|
"authType": 1,
|
||||||
"willQos": 0,
|
"willQos": 0,
|
||||||
"willRetain": 0,
|
"willRetain": 0,
|
||||||
|
|||||||
File diff suppressed because one or more lines are too long
594
Pub_Sub/fkplcpond/thingsboard/v2/fkplcpond_ec2_2_1.cfg
Normal file
594
Pub_Sub/fkplcpond/thingsboard/v2/fkplcpond_ec2_2_1.cfg
Normal file
File diff suppressed because one or more lines are too long
666
Pub_Sub/fkplcpond/thingsboard/v3/fkplcpond_tb_v3.cfg
Normal file
666
Pub_Sub/fkplcpond/thingsboard/v3/fkplcpond_tb_v3.cfg
Normal file
File diff suppressed because one or more lines are too long
2841
Pub_Sub/gateway/1701.cfg
Normal file
2841
Pub_Sub/gateway/1701.cfg
Normal file
File diff suppressed because it is too large
Load Diff
155
Pub_Sub/hrtankbattery/thingsboard/v3/pub/sendDataNoel.py
Normal file
155
Pub_Sub/hrtankbattery/thingsboard/v3/pub/sendDataNoel.py
Normal file
@@ -0,0 +1,155 @@
|
|||||||
|
# Enter your python code.
|
||||||
|
import json, os, time
|
||||||
|
from datetime import datetime as dt
|
||||||
|
from common.Logger import logger
|
||||||
|
from quickfaas.remotebus import publish
|
||||||
|
from quickfaas.global_dict import get as get_params
|
||||||
|
from quickfaas.global_dict import _set_global_args
|
||||||
|
|
||||||
|
def reboot(reason="Rebooting for config file update"):
    """Restart the device-supervisor stack by SIGHUP-ing supervisord.

    Args:
        reason: Human-readable explanation logged before the restart.
    """
    #basic = Basic()
    logger.info("!" * 10 + "REBOOTING DEVICE" + "!"*10)
    logger.info(reason)
    # SIGHUP makes supervisord reload, which restarts the managed apps.
    r = os.popen("kill -s SIGHUP `cat /var/run/python/supervisord.pid`").read()
    logger.info(f"REBOOT : {r}")
|
||||||
|
|
||||||
|
def checkFileExist(filename):
    """Ensure /var/user/files/<filename> exists, seeding new files with {}.

    Args:
        filename: bare file name (no path) to create under /var/user/files.

    The original duplicated the create-and-seed logic in both branches;
    folder creation and file seeding are now separate, single-purpose steps.
    The end state is identical: the folder exists and the file exists and
    contains at least an empty JSON object.
    """
    path = "/var/user/files"
    filepath = path + "/" + filename
    if not os.path.exists(path):
        logger.info("no folder making files folder in var/user")
        os.makedirs(path)
    if not os.path.exists(filepath):
        logger.info("no creds file making creds file")
        # Seed with {} so a later json.load on the new file succeeds.
        with open(filepath, "a") as f:
            json.dump({}, f)
|
||||||
|
|
||||||
|
def convertDStoJSON(ds):
    """Collapse a list of {"key": ..., "value": ...} records into one dict.

    Later records win on duplicate keys, matching dict-insertion semantics.
    """
    return {record["key"]: record["value"] for record in ds}
|
||||||
|
|
||||||
|
def convertJSONtoDS(j):
    """Expand a dict into a list of {"key": ..., "value": ...} records,
    preserving the dict's iteration order."""
    return [{"key": name, "value": val} for name, val in j.items()]
|
||||||
|
|
||||||
|
def checkCredentialConfig():
    """Keep MQTT credentials in sync between the supervisor config and a
    backup file (/var/user/files/creds.json).

    If the live config still holds "unknown"/empty credentials, restore them
    from the backup, rewrite the config and reboot; otherwise refresh the
    backup from whatever is currently configured.
    """
    logger.debug("CHECKING CONFIG")
    cfgpath = "/var/user/cfg/device_supervisor/device_supervisor.cfg"
    credspath = "/var/user/files/creds.json"
    cfg = dict()
    with open(cfgpath, "r") as f:
        cfg = json.load(f)
    clouds = cfg.get("clouds")
    logger.debug(clouds)
    #if not configured then try to configure from stored values
    if clouds[0]["args"]["clientId"] == "unknown" or clouds[0]["args"]["username"] == "unknown" or not clouds[0]["args"]["passwd"] or clouds[0]["args"]["passwd"] == "unknown":
        checkFileExist("creds.json")
        with open(credspath, "r") as c:
            creds = json.load(c)
            if creds:
                logger.debug("updating config with stored data")
                # Key-name mismatch is deliberate: the config uses
                # username/passwd while the backup uses userName/password.
                clouds[0]["args"]["clientId"] = creds["clientId"]
                clouds[0]["args"]["username"] = creds["userName"]
                clouds[0]["args"]["passwd"] = creds["password"]
                cfg["clouds"] = clouds
                cfg = checkParameterConfig(cfg)
                with open(cfgpath, "w", encoding='utf-8') as n:
                    json.dump(cfg, n, indent=1, ensure_ascii=False)
                # Restart so the supervisor picks up the rewritten config.
                reboot()
    else:
        #assuming clouds is filled out, if data is different then assume someone typed in something new and store it, if creds is empty fill with clouds' data
        checkFileExist("creds.json")
        with open(credspath, "r") as c:
            logger.debug("updating stored file with new data")
            cfg = checkParameterConfig(cfg)
            with open(cfgpath, "w", encoding='utf-8') as n:
                json.dump(cfg, n, indent=1, ensure_ascii=False)
            creds = json.load(c)
            if creds:
                # Mirror any manually-entered values into the backup.
                if creds["clientId"] != clouds[0]["args"]["clientId"]:
                    creds["clientId"] = clouds[0]["args"]["clientId"]
                if creds["userName"] != clouds[0]["args"]["username"]:
                    creds["userName"] = clouds[0]["args"]["username"]
                if creds["password"] != clouds[0]["args"]["passwd"]:
                    creds["password"] = clouds[0]["args"]["passwd"]
            else:
                # First run: seed the backup from the live config.
                creds["clientId"] = clouds[0]["args"]["clientId"]
                creds["userName"] = clouds[0]["args"]["username"]
                creds["password"] = clouds[0]["args"]["passwd"]
            with open(credspath, "w") as cw:
                json.dump(creds,cw)
|
||||||
|
|
||||||
|
def checkParameterConfig(cfg):
    """Reconcile wizard parameters (cfg["labels"]) with the stored copy in
    /var/user/files/params.json.

    Args:
        cfg: the device-supervisor config dict; its "labels" entry is a list
             of {"key", "value"} records.
    Returns:
        The (possibly updated) cfg.
    """
    logger.debug("Checking Parameters!!!!")
    paramspath = "/var/user/files/params.json"
    cfgparams = convertDStoJSON(cfg.get("labels"))
    #check stored values
    checkFileExist("params.json")
    with open(paramspath, "r") as f:
        logger.debug("Opened param storage file")
        params = json.load(f)
    if params:
        if cfgparams != params:
            #go through each param
            #if not "unknown" and cfg and params aren't the same take from cfg likely updated manually
            #if key in cfg but not in params copy to params
            logger.debug("equalizing params between cfg and stored")
            for key in cfgparams.keys():
                try:
                    if cfgparams[key] != params[key] and cfgparams[key] != "unknown":
                        params[key] = cfgparams[key]
                # Was a bare except; the only expected failure here is a key
                # present in cfg but missing from the store.
                except KeyError:
                    params[key] = cfgparams[key]
            cfg["labels"] = convertJSONtoDS(params)
            _set_global_args(convertJSONtoDS(params))
            with open(paramspath, "w") as p:
                json.dump(params, p)
    else:
        # Empty store: initialize it from the parameters held in memory.
        with open(paramspath, "w") as p:
            logger.debug("initializing param file with params in memory")
            json.dump(convertDStoJSON(get_params()), p)
        cfg["labels"] = get_params()

    return cfg
|
||||||
|
|
||||||
|
def sendData(message):
    """Publish the poll results in *message* to ThingsBoard in two halves.

    Args:
        message: dict with a "measures" list of {"name", "value", "timestamp"}.

    Fix: the final run-tank lookups previously raised an uncaught KeyError
    (aborting the entire publish) whenever the oil/water run-tank selector or
    the matching level measure was absent from the poll.
    """
    #logger.debug(message)
    try:
        checkCredentialConfig()
    except Exception as e:
        logger.error(e)
    # Timestamps are snapped to the nearest 10 minutes (600 s), in ms.
    payload = {"ts": (round(dt.timestamp(dt.now())/600)*600)*1000, "values": {}}
    run_tanks = {}

    for measure in message["measures"]:
        try:
            logger.debug(measure)
            # If the poll timestamp drifts over an hour from the wall clock,
            # assume the clock/poller is wedged and restart the supervisor.
            if abs(payload["ts"]/1000 - measure["timestamp"]) > 3600:
                reboot(reason="Poll timestamp and actual timestamp out of sync. Actual: {} Poll: {}".format(payload["ts"]/1000,measure["timestamp"]))
            if measure["name"] in ["oil_run_tank","water_run_tank"]:
                run_tanks[measure["name"]] = measure["value"]
            if "_level" in measure["name"]:
                run_tanks[measure["name"]] = measure["value"]
            payload["values"][measure["name"]] = measure["value"]
        except Exception as e:
            logger.error(e)

    # Resolve the active run-tank levels; guarded so one missing tag cannot
    # abort the whole publish (was an uncaught KeyError).
    try:
        payload["values"]["oil_run_tank_level"] = run_tanks["oil_0" + str(run_tanks["oil_run_tank"]) + "_level"]
        payload["values"]["water_run_tank_level"] = run_tanks["water_0" + str(run_tanks["water_run_tank"]) + "_level"]
    except KeyError as e:
        logger.error(f"run tank measure missing from poll: {e}")

    # Publish in two halves (presumably to stay under a payload size limit
    # — TODO confirm against the broker configuration).
    values_1 = dict(list(payload["values"].items())[len(payload["values"])//2:])
    payload_1 = {"ts": (round(dt.timestamp(dt.now())/600)*600)*1000, "values": {}}
    payload_1["values"] = values_1
    publish(__topic__, json.dumps(payload_1), __qos__)
    time.sleep(2)
    values_2 = dict(list(payload["values"].items())[:len(payload["values"])//2])
    payload_2 = {"ts": (round(dt.timestamp(dt.now())/600)*600)*1000, "values": {}}
    payload_2["values"] = values_2
    publish(__topic__, json.dumps(payload_2), __qos__)
    #publish(__topic__, json.dumps(payload), __qos__)
    publish("v1/devices/me/attributes", json.dumps({"latestReportTime": (round(dt.timestamp(dt.now())/600)*600)*1000}), __qos__)
|
||||||
|
|
||||||
343
Pub_Sub/plcfreshwater/mistaway/v1/pub/sendDatav2.py
Normal file
343
Pub_Sub/plcfreshwater/mistaway/v1/pub/sendDatav2.py
Normal file
@@ -0,0 +1,343 @@
|
|||||||
|
import json, os, uuid
|
||||||
|
from datetime import datetime as dt
|
||||||
|
from common.Logger import logger
|
||||||
|
from paho.mqtt import client
|
||||||
|
from quickfaas.remotebus import publish
|
||||||
|
from quickfaas.global_dict import get as get_params
|
||||||
|
from quickfaas.global_dict import _set_global_args
|
||||||
|
|
||||||
|
def reboot(reason="Rebooting for config file update"):
    """Restart the supervisor stack by signalling supervisord with SIGHUP.

    Args:
        reason: explanation written to the log before restarting.
    """
    banner = "!" * 10
    logger.info(banner + "REBOOTING DEVICE" + banner)
    logger.info(reason)
    output = os.popen("kill -s SIGHUP `cat /var/run/python/supervisord.pid`").read()
    logger.info(f"REBOOT : {output}")
|
||||||
|
|
||||||
|
def checkFileExist(filename):
    """Ensure /var/user/files/<filename> exists, seeding new files with {}.

    Best-effort: all failures are logged, never raised, so callers proceed
    and fail later with a clearer error if the file is truly unavailable.
    """
    path = "/var/user/files"
    try:
        if not os.path.exists(path):
            logger.debug("no folder making files folder in var/user")
            os.makedirs(path)
            # Seed the brand-new file with {} so a later json.load succeeds.
            with open(path + "/" + filename, "a") as f:
                json.dump({}, f)
    except Exception as e:
        logger.error(f"Something went wrong in checkFileExist while making folder: {e}")

    try:
        if not os.path.exists(path + "/" + filename):
            logger.debug("no creds file making creds file")
            with open(path + "/" + filename, "a") as f:
                json.dump({}, f)
    except Exception as e:
        # fixed typo in the original log message: "wihle" -> "while"
        logger.error(f"Something went wrong in checkFileExist while making file: {e}")
|
||||||
|
|
||||||
|
def convertDStoJSON(ds):
    """Collapse [{"key": ..., "value": ...}] records into a single dict.

    Errors are logged and swallowed; any records converted before the
    failure are kept in the returned dict.
    """
    result = dict()
    try:
        for record in ds:
            result[record["key"]] = record["value"]
    except Exception as e:
        logger.error(f"Something went wrong in convertDStoJSON: {e}")
    return result
|
||||||
|
|
||||||
|
def convertJSONtoDS(j):
    """Expand a dict into a list of {"key": ..., "value": ...} records.

    Errors are logged and swallowed; records built before the failure are
    kept in the returned list.
    """
    records = []
    try:
        for name in j.keys():
            records.append({"key": name, "value": j[name]})
    except Exception as e:
        logger.error(f"Something went wrong in convertJSONtoDS: {e}")
    return records
|
||||||
|
|
||||||
|
def checkCredentialConfig():
    """Keep MQTT credentials in sync between the supervisor config and a
    backup file (/var/user/files/creds.json).

    If the live config still holds "unknown"/empty credentials, restore them
    from the backup, rewrite the config and reboot; otherwise refresh the
    backup from whatever is currently configured. All failures are logged
    and swallowed so a bad config never crashes the publish path.

    Fix: corrected the typo "Somethign" in the outer error message.
    """
    logger.debug("CHECKING CONFIG")
    cfgpath = "/var/user/cfg/device_supervisor/device_supervisor.cfg"
    credspath = "/var/user/files/creds.json"
    cfg = dict()
    with open(cfgpath, "r") as f:
        try:
            cfg = json.load(f)
            clouds = cfg.get("clouds")
            logger.debug(clouds)
            #if not configured then try to configure from stored values
            if clouds[0]["args"]["clientId"] == "unknown" or clouds[0]["args"]["username"] == "unknown" or not clouds[0]["args"]["passwd"] or clouds[0]["args"]["passwd"] == "unknown":
                try:
                    checkFileExist("creds.json")
                except Exception as e:
                    logger.error(f"Error in checkFileExist: {e}")
                with open(credspath, "r") as c:
                    try:
                        creds = json.load(c)
                        if creds:
                            logger.debug("updating config with stored data")
                            # Key-name mismatch is deliberate: config uses
                            # username/passwd, backup uses userName/password.
                            clouds[0]["args"]["clientId"] = creds["clientId"]
                            clouds[0]["args"]["username"] = creds["userName"]
                            clouds[0]["args"]["passwd"] = creds["password"]
                            cfg["clouds"] = clouds
                            cfg = checkParameterConfig(cfg)
                            with open(cfgpath, "w", encoding='utf-8') as n:
                                json.dump(cfg, n, indent=1, ensure_ascii=False)
                            # Restart so the supervisor picks up the new config.
                            reboot()
                    except Exception as e:
                        logger.error(f"Error trying to load credentials from file: {e}")
            else:
                #assuming clouds is filled out, if data is different then assume someone typed in something new and store it, if creds is empty fill with clouds' data
                checkFileExist("creds.json")
                with open(credspath, "r") as c:
                    logger.debug("updating stored file with new data")
                    cfg = checkParameterConfig(cfg)
                    with open(cfgpath, "w", encoding='utf-8') as n:
                        json.dump(cfg, n, indent=1, ensure_ascii=False)
                    creds = json.load(c)
                    if creds:
                        # Mirror manually-entered values into the backup.
                        if creds["clientId"] != clouds[0]["args"]["clientId"]:
                            creds["clientId"] = clouds[0]["args"]["clientId"]
                        if creds["userName"] != clouds[0]["args"]["username"]:
                            creds["userName"] = clouds[0]["args"]["username"]
                        if creds["password"] != clouds[0]["args"]["passwd"]:
                            creds["password"] = clouds[0]["args"]["passwd"]
                    else:
                        # First run: seed the backup from the live config.
                        creds["clientId"] = clouds[0]["args"]["clientId"]
                        creds["userName"] = clouds[0]["args"]["username"]
                        creds["password"] = clouds[0]["args"]["passwd"]
                    with open(credspath, "w") as cw:
                        json.dump(creds,cw)
        except Exception as e:
            logger.error(f"Something went wrong in checkCredentialConfig: {e}")
|
||||||
|
|
||||||
|
def checkParameterConfig(cfg):
    """Reconcile wizard parameters (cfg["labels"]) with the stored copy in
    /var/user/files/params.json and return the (possibly updated) cfg.

    On any failure the stored params file is deleted so the next run can
    rebuild it from scratch.

    Fixes: the per-key bare `except:` now catches only the expected KeyError,
    and the error path uses os.remove instead of shelling out to `rm`.
    """
    try:
        logger.debug("Checking Parameters!!!!")
        paramspath = "/var/user/files/params.json"
        cfgparams = convertDStoJSON(cfg.get("labels"))
        #check stored values
        checkFileExist("params.json")
        with open(paramspath, "r") as f:
            logger.debug("Opened param storage file")
            params = json.load(f)
            if params:
                if cfgparams != params:
                    #go through each param
                    #if not "unknown" and cfg and params aren't the same take from cfg likely updated manually
                    #if key in cfg but not in params copy to params
                    logger.debug("equalizing params between cfg and stored")
                    for key in cfgparams.keys():
                        try:
                            if cfgparams[key] != params[key] and cfgparams[key] != "unknown":
                                params[key] = cfgparams[key]
                        except KeyError:  # key new in cfg: copy it to the store
                            params[key] = cfgparams[key]
                    cfg["labels"] = convertJSONtoDS(params)
                    _set_global_args(convertJSONtoDS(params))
                    with open(paramspath, "w") as p:
                        json.dump(params, p)
            else:
                # Empty store: initialize it from the parameters in memory.
                with open(paramspath, "w") as p:
                    logger.debug("initializing param file with params in memory")
                    json.dump(convertDStoJSON(get_params()), p)
                cfg["labels"] = get_params()

        return cfg
    except Exception as e:
        logger.error(f"Something went wrong in checkParameterConfig: {e}")
        # Drop the (possibly corrupt) stored file; ignore it already missing.
        try:
            os.remove(paramspath)
        except OSError:
            pass
        return cfg
|
||||||
|
|
||||||
|
# Module-level scratch dict; sendData builds its own local `payload`, so this
# appears unused at runtime — TODO(review) confirm before removing.
payload = {}
|
||||||
|
|
||||||
|
def get_totalizers(mac):
    """Load per-device flow totalizers from /var/user/files/totalizers_<mac>.json.

    Args:
        mac: colon-separated MAC; colons are stripped to build the filename.
    Returns:
        Dict with day/week/month/year/lifetime counters plus the *Holding
        baseline snapshots. Falls back to an all-zero dict when the file is
        missing, unreadable, or empty.

    Fixes: the zeroed default dict was duplicated verbatim in two branches
    (now built once), and the bare `except:` is narrowed to Exception so
    KeyboardInterrupt/SystemExit are no longer swallowed.
    """
    defaults = {
        "day": 0,
        "week": 0,
        "month": 0,
        "year": 0,
        "lifetime": 0,
        "dayHolding": 0,
        "weekHolding": 0,
        "monthHolding": 0,
        "yearHolding": 0
    }
    try:
        mac = "".join(mac.split(":"))
        with open(f"/var/user/files/totalizers_{mac}.json", "r") as t:
            totalizers = json.load(t)
        if not totalizers:
            logger.info("-----INITIALIZING TOTALIZERS-----")
            totalizers = defaults
    except Exception:
        # Missing or corrupt file: start counting from zero.
        totalizers = defaults
    return totalizers
|
||||||
|
|
||||||
|
|
||||||
|
def lwt(mac, lwtData):
    """Maintain a per-device paho MQTT session whose Last Will marks the
    device disconnected; (re)connects and publishes connected=True.

    Args:
        mac: device MAC used to build the meshify topic path.
        lwtData: mutable per-device state dict holding the "init" flag and
            the paho "client" instance (see lwtData in this module).
    """
    try:
        #if not lwtData["connected"]:
        if not lwtData["init"]:
            print("INITIALIZING LWT CLIENT")
            # NOTE(review): broker credentials are hard-coded — confirm intended.
            lwtData["client"].username_pw_set(username="admin", password="columbus")
            # The broker publishes connected=False on our behalf if we drop off.
            lwtData["client"].will_set("meshify/db/194/_/mainHP/" + mac + ":00:00/connected",json.dumps([{"value":False}]))
            lwtData["client"].reconnect_delay_set(min_delay=10, max_delay=120)
            lwtData["init"] = True
        print("Connecting to MQTT Broker for LWT purposes!!!!!!!")
        lwtData["client"].connect("mq194.imistaway.net",1883, 600)
        lwtData["client"].reconnect()
        # Refresh the online flag every call.
        lwtData["client"].publish("meshify/db/194/_/mainHP/" + mac + ":00:00/connected", json.dumps([{"value":True}]))
    except Exception as e:
        # Best-effort: LWT upkeep must never break the data publish path.
        print("LWT DID NOT DO THE THING")
        print(e)
|
||||||
|
|
||||||
|
# Per-device LWT session state, keyed by MAC: {"init": bool, "client": paho client}.
lwtData = {}
|
||||||
|
|
||||||
|
def sendData(message,wizard_api):
    """Publish one meshify message per measure for every device in *message*.

    Expects message["values"] keyed by device MAC, each mapping measure name
    -> {"status": int, "raw_data": ...}. Maintains per-device LWT MQTT
    sessions and day/month flow totals derived from lifetime_flow_meter_gal.
    """
    logger.debug(message)
    checkCredentialConfig()

    for device in message["values"].keys():
        # Lazily create one LWT client per device MAC.
        if not device in lwtData.keys():
            lwtData[device] = {
                "init":False,
                "client": client.Client(client_id=str(uuid.uuid4()), clean_session=True, userdata=None, protocol=client.MQTTv311, transport="tcp")
            }
        #setup LWT and refresh status
        lwt(device, lwtData[device])
        # Timestamps snapped to the nearest 10 minutes (600 s), in ms.
        payload = {"ts": (round(dt.timestamp(dt.now())/600)*600)*1000, "values": {}}
        resetPayload = {"ts": "", "values": {}}
        dayReset, weekReset, monthReset, yearReset = False, False, False, False
        for measure in message["values"][device].keys():
            try:
                # status == 1 appears to mean the PLC read succeeded — TODO confirm.
                if message["values"][device][measure]["status"] == 1:
                    if measure in ["lifetime_flow_meter_gal"]:
                        # Derive day/month totals from the lifetime counter
                        # (gallons and barrels; 42 gal per bbl).
                        payload["values"]["total_fm_day_gal"], payload["values"]["total_fm_day_bbls"],dayReset = totalizeDay(message["values"][device][measure]["raw_data"], device)
                        #payload["values"]["week_volume"], weekReset = totalizeWeek(message["values"][device][measure]["raw_data"])
                        payload["values"]["total_fm_month_gal"],payload["values"]["total_fm_month_bbls"], monthReset = totalizeMonth(message["values"][device][measure]["raw_data"], device)
                        #payload["values"]["year_volume"], yearReset = totalizeYear(message["values"][device][measure]["raw_data"])
                        payload["values"]["lifetime_flow_meter_gal"] = message["values"][device][measure]["raw_data"]
                        payload["values"]["lifetime_flow_meter_bbls"] = message["values"][device][measure]["raw_data"]/42
                    elif measure in ["raw_hand_input", "raw_auto_input", "raw_run_status", "raw_local_start","raw_overload_status"]:
                        # Map raw integer PLC flags to their display strings.
                        payload["values"][measure] = convert_int(measure, message["values"][device][measure]["raw_data"])
                    else:
                        payload["values"][measure] = message["values"][device][measure]["raw_data"]
            except Exception as e:
                logger.error(e)

        # plc_ping doubles as a health flag: any collected value means comms are up.
        if payload["values"]:
            payload["values"]["plc_ping"] = "OK"
        else:
            payload["values"]["plc_ping"] = "Comms Error to PLC"

        for measure in payload["values"].keys():
            publish(__topic__+ device + ":01:99/" + measure, json.dumps({"value": payload["values"][measure]}), __qos__)

        # On a period rollover, publish the closing total and zero the counter.
        if dayReset:
            resetPayload["values"]["total_fm_yesterday_gal"] = payload["values"]["total_fm_day_gal"]
            resetPayload["values"]["total_fm_day_gal"] = 0
            resetPayload["values"]["total_fm_yesterday_bbls"] = payload["values"]["total_fm_day_bbls"]
            resetPayload["values"]["total_fm_day_bbls"] = 0
        if weekReset:
            resetPayload["values"]["last_week_volume"] = payload["values"]["week_volume"]
            resetPayload["values"]["week_volume"] = 0
        if monthReset:
            resetPayload["values"]["total_fm_last_month_gal"] = payload["values"]["total_fm_month_gal"]
            resetPayload["values"]["total_fm_month_gal"] = 0
            resetPayload["values"]["total_fm_last_month_bbls"] = payload["values"]["total_fm_month_bbls"]
            resetPayload["values"]["total_fm_month_bbls"] = 0
        if yearReset:
            resetPayload["values"]["last_year_volume"] = payload["values"]["year_volume"]
            resetPayload["values"]["year_volume"] = 0

        if resetPayload["values"]:
            for measure in resetPayload["values"].keys():
                publish(__topic__+ device + ":01:99/" + measure, json.dumps({"value": resetPayload["values"][measure]}), __qos__)
|
||||||
|
|
||||||
|
def saveTotalizers(totalizers, mac):
    """Persist the totalizer dict for *mac* under /var/user/files.

    Best-effort: any failure is logged and swallowed.
    """
    try:
        squished = "".join(mac.split(":"))
        with open(f"/var/user/files/totalizers_{squished}.json", "w") as t:
            json.dump(totalizers,t)
    except Exception as e:
        logger.error(e)
|
||||||
|
|
||||||
|
|
||||||
|
def totalizeDay(lifetime, mac):
    """Return (gallons, barrels, reset) for the current day's flow total.

    Gallons are the lifetime counter minus the day's stored baseline; on a
    day-of-month rollover the baseline is re-snapshotted, persisted, and
    reset=True is reported (the returned total is still the closing one).
    """
    totalizers = get_totalizers(mac)
    now = dt.fromtimestamp(round(dt.timestamp(dt.now())/600)*600)
    reset = False
    gallons = lifetime - totalizers["dayHolding"]
    today = int(now.strftime("%d"))
    if today != int(totalizers["day"]):
        totalizers["dayHolding"] = lifetime
        totalizers["day"] = today
        saveTotalizers(totalizers, mac)
        reset = True
    return (gallons, gallons / 42, reset)
|
||||||
|
|
||||||
|
def totalizeWeek(lifetime, mac):
    """Return (gallons, barrels, reset) for the current week's flow total.

    Rolls over when the week number changes on a Sunday, or on the very
    first run (stored week still 0).
    """
    totalizers = get_totalizers(mac)
    now = dt.fromtimestamp(round(dt.timestamp(dt.now())/600)*600)
    reset = False
    gallons = lifetime - totalizers["weekHolding"]
    week = now.strftime("%U")
    rollover = (week != totalizers["week"] and now.strftime("%a") == "Sun") or totalizers["week"] == 0
    if rollover:
        totalizers["weekHolding"] = lifetime
        totalizers["week"] = week
        saveTotalizers(totalizers, mac)
        reset = True
    return (gallons, gallons / 42, reset)
|
||||||
|
|
||||||
|
def totalizeMonth(lifetime, mac):
    """Return (gallons, barrels, reset) for the current month's flow total."""
    totalizers = get_totalizers(mac)
    now = dt.fromtimestamp(round(dt.timestamp(dt.now())/600)*600)
    reset = False
    gallons = lifetime - totalizers["monthHolding"]
    month = now.strftime("%m")
    if int(month) != int(totalizers["month"]):
        totalizers["monthHolding"] = lifetime
        totalizers["month"] = month
        saveTotalizers(totalizers, mac)
        reset = True
    return (gallons, gallons / 42, reset)
|
||||||
|
|
||||||
|
def totalizeYear(lifetime, mac):
    """Return (gallons, barrels, reset) for the current year's flow total."""
    totalizers = get_totalizers(mac)
    now = dt.fromtimestamp(round(dt.timestamp(dt.now())/600)*600)
    reset = False
    gallons = lifetime - totalizers["yearHolding"]
    year = now.strftime("%Y")
    if int(year) != int(totalizers["year"]):
        totalizers["yearHolding"] = lifetime
        totalizers["year"] = year
        saveTotalizers(totalizers, mac)
        reset = True
    return (gallons, gallons / 42, reset)
|
||||||
|
|
||||||
|
def convert_int(plc_tag, value):
    """Translate a raw integer PLC flag into its display string.

    Args:
        plc_tag: one of the raw_* tag names below.
        value: integer code read from the PLC.
    Returns:
        The matching display string, "Invalid Code" for an unknown value,
        or "Invalid Tag" for an unknown tag name.

    Improvement: the original eagerly built every code table AND formatted
    every tag on each call; now only the requested tag's table is consulted.
    """
    input_codes = {0: "Off", 1: "On"}
    tag_tables = {
        "raw_hand_input": input_codes,
        "raw_local_start": input_codes,
        "raw_auto_input": input_codes,
        "raw_run_status": {0: "Stopped", 1: "Running"},
        "raw_overload_status": {0: "Good", 1: "Down on Overload Tripped"},
    }
    table = tag_tables.get(plc_tag)
    if table is None:
        return "Invalid Tag"
    return table.get(value, "Invalid Code")
|
||||||
92
Pub_Sub/plcfreshwater/mistaway/v1/sub/receiveCommandv2.py
Normal file
92
Pub_Sub/plcfreshwater/mistaway/v1/sub/receiveCommandv2.py
Normal file
@@ -0,0 +1,92 @@
|
|||||||
|
# Enter your python code.
|
||||||
|
import json
|
||||||
|
from quickfaas.measure import recall
|
||||||
|
from quickfaas.remotebus import publish
|
||||||
|
from common.Logger import logger
|
||||||
|
from quickfaas.measure import write_plc_values
|
||||||
|
|
||||||
|
def sync(mac,value, wizard_api):
    """Re-publish every measure of the controller matching *mac* to meshify.

    Args:
        mac: full device MAC including the trailing ":00:00" suffix.
        value: command value (unused here).
        wizard_api: platform handle (unused here).

    Fix: `data` was unbound (NameError at logger.info) whenever recall()
    raised; it now defaults to an empty list so the function degrades to a
    no-op loop instead of crashing.
    """
    #get new values and send
    data = []
    try:
        data = recall()#json.loads(recall().decode("utf-8"))
    except Exception as e:
        logger.error(e)
    logger.info(data)

    for controller in data:
        # Controller names are the MAC minus the trailing ":00:00" suffix.
        if controller["name"] == mac[:-6]:
            if controller["health"] == 1:
                publish("meshify/db/194/_/plcfreshwater/" + mac + "/plc_ping", json.dumps({"value": "OK"}))
            else:
                publish("meshify/db/194/_/plcfreshwater/" + mac + "/plc_ping", json.dumps({"value": "Comms Error to PLC"}))
            for measure in controller["measures"]:
                #publish measure
                topic = "meshify/db/194/_/plcfreshwater/" + mac + "/" + measure["name"]
                if measure["name"] in ["raw_hand_input", "raw_auto_input", "raw_run_status", "raw_local_start","raw_overload_status"]:
                    # Raw integer flags go out as their display strings.
                    payload = [{"value": convert_int(measure["name"], measure["value"])}]
                else:
                    payload = [{"value": measure["value"]}]
                logger.debug("Sending on topic: {}".format(topic))
                logger.debug("Sending value: {}".format(payload))
                publish(topic, json.dumps(payload))
|
||||||
|
def writeplctag(mac, value, wizard_api):
    """Write a single tag/value pair to the PLC addressed by *mac*.

    *value* arrives as a quasi-JSON string using single quotes; it is
    normalized and parsed, then forwarded via write_plc_values. Failures
    are logged and swallowed.
    """
    try:
        parsed = json.loads(value.replace("'",'"'))
        logger.debug(parsed)
        # Normalize legacy tag casing.
        if parsed["tag"] == "CMD_Cloud_Control":
            parsed["tag"] = "cmd_cloud_control"
        write_plc_values({mac[:-6]: {parsed["tag"]: parsed["val"]}})
    except Exception as e:
        logger.debug(e)
|
||||||
|
|
||||||
|
def receiveCommand(topic, payload, wizard_api):
    """Dispatch an incoming meshify command payload to sync/writeplctag.

    Args:
        topic: MQTT topic the command arrived on (logged only).
        payload: JSON array; only the first element is processed.
        wizard_api: platform handle forwarded to the command handler.

    Raises:
        KeyError: for commands other than "sync"/"writeplctag".
    """
    logger.debug(topic)
    logger.debug(json.loads(payload))
    p = json.loads(payload)[0]
    # The command name arrives as "<prefix>.<command>".
    command = p["payload"]["name"].split(".")[1]
    commands = {
        "sync": sync,
        "writeplctag": writeplctag,
    }
    commands[command](p["mac"].lower(),p["payload"]["value"], wizard_api)
    #logger.debug(command)
    ack(p["msgId"], p["mac"], command, p["payload"]["name"].split(".")[1], p["payload"]["value"], wizard_api)
|
||||||
|
|
||||||
|
def ack(msgid, mac, name, command, value, wizard_api):
    """Publish success acknowledgements for a processed command.

    NOTE(review): the caller passes the same command string for both *name*
    and *command*, so the apparently swapped argument order is harmless
    today — confirm before changing the call site.
    """
    #logger.debug(mac)
    # MAC without its trailing two ":00" groups: squished (no colons) and
    # lowercased colon-separated forms for the two topic styles below.
    macsquish = "".join(mac.split(":")[:-2])
    maclower = ":".join(mac.split(":")[:-2])
    maclower = maclower.lower()
    #logger.debug(msgid)
    #logger.debug(mac)
    #logger.debug(name)
    #logger.debug(value)
    publish("meshify/responses/" + str(msgid), json.dumps([{"value": "{} Success Setting: {} To: {}".format(macsquish,name, value), "msgid": str(msgid)}]))
    publish("meshify/db/194/_/mainMeshify/" + maclower + ":00:00/commands", json.dumps([{"value": {"status": "success", "value": str(value), "channel": command}, "msgid": str(msgid)}]))
|
||||||
|
|
||||||
|
def convert_int(plc_tag, value):
    """Translate a raw integer PLC flag into its display string.

    Returns "Invalid Code" for an unknown value and "Invalid Tag" for an
    unknown tag name.
    """
    on_off = {0: "Off", 1: "On"}
    running = {0: "Stopped", 1: "Running"}
    overload = {0: "Good", 1: "Down on Overload Tripped"}

    lookup = {
        "raw_hand_input": on_off,
        "raw_local_start": on_off,
        "raw_auto_input": on_off,
        "raw_run_status": running,
        "raw_overload_status": overload,
    }

    if plc_tag not in lookup:
        return "Invalid Tag"
    return lookup[plc_tag].get(value, "Invalid Code")
|
||||||
Reference in New Issue
Block a user