minor updates

This commit is contained in:
Nico Melone
2023-09-06 17:13:21 -05:00
parent 52aa0672b3
commit c6a27c174d
13 changed files with 13274 additions and 873 deletions

BIN
Pub_Sub/.DS_Store vendored

Binary file not shown.

Binary file not shown.

View File

@@ -17,7 +17,7 @@
"outputs": [],
"source": [
"root = \"/Users/nico/Documents/GitHub/HP_InHand_IG502/Pub_Sub\"\n",
"devicetype = \"plcfreshwater_advvfdipp\"\n",
"devicetype = \"hrvalvecontroller\"\n",
"platform = \"thingsboard\" #\"mistaway\"\n",
"platform_short = \"tb\" if platform == \"thingsboard\" else \"ma\" \n",
"startversion = 1\n",
@@ -26,18 +26,18 @@
},
{
"cell_type": "code",
"execution_count": 4,
"execution_count": 3,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"checking path exists: /Users/nico/Documents/GitHub/HP_InHand_IG502/Pub_Sub/plcfreshwater_advvfdipp/thingsboard/v1/plcfreshwater_advvfdipp_tb_v1.cfg\n",
"/Users/nico/Documents/GitHub/HP_InHand_IG502/Pub_Sub/plcfreshwater_advvfdipp/thingsboard/v1\n",
"Write Code Path: /Users/nico/Documents/GitHub/HP_InHand_IG502/Pub_Sub/plcfreshwater_advvfdipp/thingsboard/v1/pub/sendData.py\n",
"/Users/nico/Documents/GitHub/HP_InHand_IG502/Pub_Sub/plcfreshwater_advvfdipp/thingsboard/v1/pub\n",
"Path didn't exist creating path: /Users/nico/Documents/GitHub/HP_InHand_IG502/Pub_Sub/plcfreshwater_advvfdipp/thingsboard/v1/pub\n"
"checking path exists: /Users/nico/Documents/GitHub/HP_InHand_IG502/Pub_Sub/hrvalvecontroller/thingsboard/v1/hrvalvecontroller_tb_v1.cfg\n",
"/Users/nico/Documents/GitHub/HP_InHand_IG502/Pub_Sub/hrvalvecontroller/thingsboard/v1\n",
"Write Code Path: /Users/nico/Documents/GitHub/HP_InHand_IG502/Pub_Sub/hrvalvecontroller/thingsboard/v1/pub/sendData.py\n",
"/Users/nico/Documents/GitHub/HP_InHand_IG502/Pub_Sub/hrvalvecontroller/thingsboard/v1/pub\n",
"Path didn't exist creating path: /Users/nico/Documents/GitHub/HP_InHand_IG502/Pub_Sub/hrvalvecontroller/thingsboard/v1/pub\n"
]
}
],

7776
Pub_Sub/gateway/a_pond.cfg Normal file

File diff suppressed because it is too large Load Diff

4006
Pub_Sub/gateway/tree.cfg Normal file

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long

View File

@@ -1,5 +0,0 @@
MeasuringPointName,ControllerName,GroupName,UploadType,DeadZonePercent,DataType,ArrayIndex,EnableBit,BitIndex,reverseBit,Address,Decimal,Len,ReadWrite,Unit,Description,Transform Type,MaxValue,MinValue,MaxScale,MinScale,Gain,Offset,startBit,endBit,Pt,Ct,Mapping_table,TransDecimal,bitMap,msecSample,storageLwTSDB,DataEndianReverse,ReadOffset,ReadLength,WriteOffset,WriteLength,DataParseMethod,BitId
pressure_1,hrflowskid,default,periodic,,FLOAT,,,,,Scaled_PSI,2,,ro,,,none,,,,,,,,,,,,,,,0,,,,,,,
flowrate_1,hrflowskid,default,periodic,,FLOAT,,,,,Scaled_Flow_Rate,2,,ro,,,none,,,,,,,,,,,,,,,0,,,,,,,
total_1_lifetime,hrflowskid,default,periodic,,FLOAT,,,,,Scaled_Lifetime_Flow_Rate,2,,ro,,,none,,,,,,,,,,,,,,,0,,,,,,,
valve_1_state,hrflowskid,default,periodic,,FLOAT,,,,,Scaled_Valve_FB,2,,ro,,,none,,,,,,,,,,,,,,,0,,,,,,,
1 MeasuringPointName ControllerName GroupName UploadType DeadZonePercent DataType ArrayIndex EnableBit BitIndex reverseBit Address Decimal Len ReadWrite Unit Description Transform Type MaxValue MinValue MaxScale MinScale Gain Offset startBit endBit Pt Ct Mapping_table TransDecimal bitMap msecSample storageLwTSDB DataEndianReverse ReadOffset ReadLength WriteOffset WriteLength DataParseMethod BitId
2 pressure_1 hrflowskid default periodic FLOAT Scaled_PSI 2 ro none 0
3 flowrate_1 hrflowskid default periodic FLOAT Scaled_Flow_Rate 2 ro none 0
4 total_1_lifetime hrflowskid default periodic FLOAT Scaled_Lifetime_Flow_Rate 2 ro none 0
5 valve_1_state hrflowskid default periodic FLOAT Scaled_Valve_FB 2 ro none 0

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,217 @@
import json, os
from datetime import datetime as dt
from datetime import timedelta as td
from common.Logger import logger
from quickfaas.remotebus import publish
from quickfaas.global_dict import get as get_params
from quickfaas.global_dict import _set_global_args
def reboot():
    """Force a device restart by signalling supervisord.

    Sends SIGHUP to the PID stored in /var/run/python/supervisord.pid,
    which restarts the supervised runtime, and logs the shell output.
    """
    logger.info("!" * 10 + "REBOOTING DEVICE" + "!" * 10)
    command = "kill -s SIGHUP `cat /var/run/python/supervisord.pid`"
    result = os.popen(command).read()
    logger.info(f"REBOOT : {result}")
def checkFileExist(filename):
    """Ensure /var/user/files exists and contains *filename*.

    Creates the folder if missing and seeds a brand-new file with an
    empty JSON object so later json.load calls succeed.
    """
    path = "/var/user/files"
    target = path + "/" + filename
    if not os.path.exists(path):
        logger.info("no folder making files folder in var/user")
        os.makedirs(path)
        # Seed the file immediately so the folder never sits empty.
        with open(target, "a") as seed:
            json.dump({}, seed)
    if not os.path.exists(target):
        logger.info("no creds file making creds file")
        with open(target, "a") as seed:
            json.dump({}, seed)
def convertDStoJSON(ds):
    """Flatten a list of {"key": ..., "value": ...} records into a dict.

    Later records win when the same key appears twice, matching the
    original insertion-order overwrite behaviour.
    """
    return {record["key"]: record["value"] for record in ds}
def convertJSONtoDS(j):
    """Expand a dict into a list of {"key": ..., "value": ...} records.

    Inverse of convertDStoJSON; preserves the dict's iteration order.
    """
    return [{"key": name, "value": val} for name, val in j.items()]
def checkCredentialConfig():
    """Reconcile MQTT cloud credentials between the live supervisor config
    and the on-disk backup at /var/user/files/creds.json.

    Two directions of sync:
      * config is unset ("unknown"/empty) -> restore from backup and reboot;
      * config is set -> copy any changed values into the backup file.

    NOTE(review): nesting reconstructed from a whitespace-stripped diff —
    confirm against the deployed file.
    """
    logger.info("CHECKING CONFIG")
    cfgpath = "/var/user/cfg/device_supervisor/device_supervisor.cfg"
    credspath = "/var/user/files/creds.json"
    cfg = dict()
    with open(cfgpath, "r") as f:
        cfg = json.load(f)
    # Only the first cloud entry is ever consulted.
    clouds = cfg.get("clouds")
    logger.info(clouds)
    # If not configured then try to configure from stored values.
    if clouds[0]["args"]["clientId"] == "unknown" or clouds[0]["args"]["username"] == "unknown" or not clouds[0]["args"]["passwd"] or clouds[0]["args"]["passwd"] == "unknown":
        checkFileExist("creds.json")
        with open(credspath, "r") as c:
            creds = json.load(c)
        # Restore + reboot only when a non-empty backup exists; a fresh
        # file holds {} and falls through without touching the config.
        if creds:
            logger.info("updating config with stored data")
            # Backup uses "userName"/"password"; config uses "username"/"passwd".
            clouds[0]["args"]["clientId"] = creds["clientId"]
            clouds[0]["args"]["username"] = creds["userName"]
            clouds[0]["args"]["passwd"] = creds["password"]
            cfg["clouds"] = clouds
            cfg = checkParameterConfig(cfg)
            with open(cfgpath, "w", encoding='utf-8') as n:
                json.dump(cfg, n, indent=1, ensure_ascii=False)
            # Reboot so the supervisor picks up the restored credentials.
            reboot()
    else:
        # Assuming clouds is filled out: if data differs, assume someone typed
        # something new and store it; if creds is empty, fill it from clouds.
        checkFileExist("creds.json")
        with open(credspath, "r") as c:
            logger.info("updating stored file with new data")
            # Parameters are synced and the config rewritten even when
            # credentials themselves are unchanged.
            cfg = checkParameterConfig(cfg)
            with open(cfgpath, "w", encoding='utf-8') as n:
                json.dump(cfg, n, indent=1, ensure_ascii=False)
            creds = json.load(c)
            if creds:
                if creds["clientId"] != clouds[0]["args"]["clientId"]:
                    creds["clientId"] = clouds[0]["args"]["clientId"]
                if creds["userName"] != clouds[0]["args"]["username"]:
                    creds["userName"] = clouds[0]["args"]["username"]
                if creds["password"] != clouds[0]["args"]["passwd"]:
                    creds["password"] = clouds[0]["args"]["passwd"]
            else:
                creds["clientId"] = clouds[0]["args"]["clientId"]
                creds["userName"] = clouds[0]["args"]["username"]
                creds["password"] = clouds[0]["args"]["passwd"]
            with open(credspath, "w") as cw:
                json.dump(creds,cw)
def checkParameterConfig(cfg):
    """Merge the config's "labels" parameters with the persisted copy at
    /var/user/files/params.json and push the result into global args.

    Returns the (possibly updated) cfg dict; also rewrites params.json.

    NOTE(review): nesting reconstructed from a whitespace-stripped diff —
    confirm against the deployed file.
    """
    logger.info("Checking Parameters!!!!")
    paramspath = "/var/user/files/params.json"
    # "labels" is stored as a key/value record list; flatten for comparison.
    cfgparams = convertDStoJSON(cfg.get("labels"))
    # Check stored values.
    checkFileExist("params.json")
    with open(paramspath, "r") as f:
        logger.info("Opened param storage file")
        params = json.load(f)
        if params:
            if cfgparams != params:
                # Go through each param:
                #  - if cfg value differs and is not "unknown", cfg wins
                #    (likely updated manually);
                #  - if a key exists in cfg but not in params, copy it over
                #    (the KeyError path below).
                logger.info("equalizing params between cfg and stored")
                for key in cfgparams.keys():
                    try:
                        if cfgparams[key] != params[key] and cfgparams[key] != "unknown":
                            params[key] = cfgparams[key]
                    except:
                        # Key missing from stored params — adopt the cfg value.
                        params[key] = cfgparams[key]
            # Write the merged set back to cfg, process globals, and disk.
            cfg["labels"] = convertJSONtoDS(params)
            _set_global_args(convertJSONtoDS(params))
            with open(paramspath, "w") as p:
                json.dump(params, p)
        else:
            # Empty/new params file: seed it from the in-memory globals.
            with open(paramspath, "w") as p:
                logger.info("initializing param file with params in memory")
                json.dump(convertDStoJSON(get_params()), p)
            cfg["labels"] = get_params()
    return cfg
def sendData(message, wizard_api):
    """Wizard entry point: publish hrvalvecontroller telemetry to the cloud.

    Builds a telemetry payload timestamped on the nearest 10-minute boundary,
    derives day/month running totals from the lifetime totalizer, publishes
    them, and — on a day/month rollover — publishes a follow-up payload that
    zeroes the rolled-over totals.

    Parameters:
        message: device-supervisor measurement dict; values read from
            message["values"]["hrvalvecontroller"][measure]["raw_data"].
        wizard_api: platform-supplied handle (unused here).
    """
    # Timestamp snapped to the nearest 600 s boundary, in milliseconds.
    ts_ms = (round(dt.timestamp(dt.now()) / 600) * 600) * 1000
    payload = {"ts": ts_ms, "values": {}}
    resetPayload = {"ts": "", "values": {}}
    # BUGFIX: these were previously only assigned inside the totalizer branch,
    # so a message without "totalizer_lifetime" raised NameError below.
    dayReset = False
    monthReset = False
    try:
        checkCredentialConfig()
    except Exception as e:
        logger.error(e)
    logger.info(message)
    for measure in message["values"]["hrvalvecontroller"].keys():
        try:
            logger.debug(measure)
            raw = message["values"]["hrvalvecontroller"][measure]["raw_data"]
            if measure in ["totalizer_lifetime"]:
                payload["values"]["today_total"], dayReset = totalizeDay(raw)
                payload["values"]["month_total"], monthReset = totalizeMonth(raw)
            payload["values"][measure] = raw
        except Exception as e:
            logger.error(e)
    publish(__topic__, json.dumps(payload), __qos__, cloud_name="default")
    publish("v1/devices/me/attributes", json.dumps({"latestReportTime": ts_ms}), __qos__, cloud_name="default")
    # BUGFIX: the reset branch read payload["values"]["today_volume"] /
    # ["month_volume"], keys that are never set (this device publishes
    # *_total) — a guaranteed KeyError on every rollover. Zero the same
    # *_total keys that were just published instead.
    # NOTE(review): assumes the dashboard expects the *_total key family —
    # confirm against the ThingsBoard device profile.
    if dayReset:
        resetPayload["values"]["yesterday_total"] = payload["values"]["today_total"]
        resetPayload["values"]["today_total"] = 0
    if monthReset:
        resetPayload["values"]["last_month_total"] = payload["values"]["month_total"]
        resetPayload["values"]["month_total"] = 0
    if resetPayload["values"]:
        # 1 s after the telemetry timestamp so the zeroing sorts after it.
        resetPayload["ts"] = 1000 + ts_ms
        publish(__topic__, json.dumps(resetPayload), __qos__, cloud_name="default")
def get_totalizers():
    """Load persisted totalizer state from /var/user/files/totalizers.json.

    Returns the stored dict, or a fresh zeroed state when the file is
    missing, unreadable, corrupt, or empty.
    """
    # Single source of truth for the initial state (the original duplicated
    # this 9-key literal in two places).
    defaults = {
        "dayDate": "2022-01-01",
        "week": 0,
        "monthDate": "2022-01-01",
        "year": 0,
        "lifetime": 0,
        "dayHolding": 0,
        "weekHolding": 0,
        "monthHolding": 0,
        "yearHolding": 0
    }
    try:
        with open("/var/user/files/totalizers.json", "r") as t:
            totalizers = json.load(t)
    except (OSError, ValueError):
        # Narrowed from a bare except: OSError covers a missing/unreadable
        # file, ValueError covers json.JSONDecodeError.
        return defaults
    if not totalizers:
        logger.debug("-----INITIALIZING TOTALIZERS-----")
        return defaults
    return totalizers
def saveTotalizers(totalizers):
    """Persist totalizer state to /var/user/files/totalizers.json.

    Best-effort: any failure is logged rather than raised, so a full or
    read-only filesystem cannot break the telemetry path.
    """
    try:
        with open("/var/user/files/totalizers.json", "w") as outfile:
            json.dump(totalizers, outfile)
    except Exception as err:
        logger.error(err)
def totalizeDay(lifetime):
    """Return (today's accumulated volume, day-rolled-over flag) derived
    from a monotonically increasing *lifetime* totalizer reading.

    The returned value is computed BEFORE the holding register is reset, so
    on the first call of a new day the caller receives the final total of
    the previous day together with reset=True.
    """
    totalizers = get_totalizers()
    # Snap "now" to the nearest 10-minute boundary for stable bucketing.
    now = dt.fromtimestamp(round(dt.timestamp(dt.now())/600)*600)
    now = now - td(seconds=60*60*8) #time shifted back 8 hours
    reset = False
    # Today's total = current lifetime reading minus the reading captured
    # at the last day boundary.
    value = lifetime - totalizers["dayHolding"]
    if not now.date() == dt.strptime(totalizers["dayDate"], "%Y-%m-%d").date():
        # New (shifted) day: re-baseline the holding register and persist.
        totalizers["dayHolding"] = lifetime
        totalizers["dayDate"] = str(now.date())
        saveTotalizers(totalizers)
        reset = True
    return (value,reset)
def totalizeMonth(lifetime):
    """Return (this month's accumulated volume, month-rolled-over flag)
    derived from a monotonically increasing *lifetime* totalizer reading.

    Mirrors totalizeDay: the value is computed BEFORE the holding register
    is reset, so the first call of a new month returns the previous month's
    final total with reset=True.
    """
    totalizers = get_totalizers()
    # Snap "now" to the nearest 10-minute boundary for stable bucketing.
    now = dt.fromtimestamp(round(dt.timestamp(dt.now())/600)*600)
    now = now - td(seconds=60*60*8) #time shifted back 8 hours
    # Collapse to the first of the month so date comparison is month-granular.
    now = dt.strptime(f"{now.year}-{now.month}", "%Y-%m")
    reset = False
    value = lifetime - totalizers["monthHolding"]
    if not now.date() == dt.strptime(totalizers["monthDate"], "%Y-%m-%d").date():
        # New month: re-baseline the holding register and persist.
        totalizers["monthHolding"] = lifetime
        totalizers["monthDate"] = str(now.date())
        saveTotalizers(totalizers)
        reset = True
    return (value,reset)

View File

@@ -147,6 +147,7 @@ def sendData(message):
publish(__topic__ + ":01:99/" + "plc_ping", json.dumps({"value": "OK"}), __qos__)
else:
publish(__topic__ + ":01:99/" + "plc_ping", json.dumps({"value": "Comms Error to PLC"}), __qos__)
for measure in message["measures"]:
try:
logger.debug(measure)
@@ -185,4 +186,3 @@ def convert_int(plc_tag, value):
return plc_tags.get(plc_tag, "Invalid Tag")

File diff suppressed because one or more lines are too long