import json, os, time

from datetime import datetime as dt
from datetime import timedelta as td

from common.Logger import logger
from quickfaas.remotebus import publish
from quickfaas.global_dict import get as get_params
from quickfaas.global_dict import _set_global_args


def reboot():
    # Reload supervisord by sending it SIGHUP so the freshly written config takes effect.
    # basic = Basic()
    logger.info("!" * 10 + "REBOOTING DEVICE" + "!" * 10)
    r = os.popen("kill -s SIGHUP `cat /var/run/python/supervisord.pid`").read()
    logger.info(f"REBOOT : {r}")


def checkFileExist(filename):
    # Ensure /var/user/files exists and that the given JSON file exists,
    # seeding the file with an empty object when it has to be created.
    path = "/var/user/files"
    if not os.path.exists(path):
        logger.info("files folder missing, creating /var/user/files")
        os.makedirs(path)
        with open(path + "/" + filename, "a") as f:
            json.dump({}, f)
    if not os.path.exists(path + "/" + filename):
        logger.info(f"{filename} missing, creating it")
        with open(path + "/" + filename, "a") as f:
            json.dump({}, f)


def convertDStoJSON(ds):
    j = dict()
    for x in ds:
        j[x["key"]] = x["value"]
    return j


def convertJSONtoDS(j):
    d = []
    for key in j.keys():
        d.append({"key": key, "value": j[key]})
    return d


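# Illustrative example (not part of the original script): the config used here keeps
# labels/params as a list of {"key": ..., "value": ...} entries, and the two helpers
# above convert between that list form and a plain dict:
#   convertDStoJSON([{"key": "site", "value": "A1"}])  ->  {"site": "A1"}
#   convertJSONtoDS({"site": "A1"})  ->  [{"key": "site", "value": "A1"}]

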
def checkCredentialConfig():
    logger.info("CHECKING CONFIG")
    cfgpath = "/var/user/cfg/device_supervisor/device_supervisor.cfg"
    credspath = "/var/user/files/creds.json"
    cfg = dict()
    with open(cfgpath, "r") as f:
        cfg = json.load(f)
    clouds = cfg.get("clouds")
    logger.info(clouds)
    # If the cloud connection is not configured, try to configure it from stored values.
    if clouds[0]["args"]["clientId"] == "unknown" or clouds[0]["args"]["username"] == "unknown" or not clouds[0]["args"]["passwd"] or clouds[0]["args"]["passwd"] == "unknown":
        checkFileExist("creds.json")
        with open(credspath, "r") as c:
            creds = json.load(c)
        if creds:
            logger.info("updating config with stored data")
            clouds[0]["args"]["clientId"] = creds["clientId"]
            clouds[0]["args"]["username"] = creds["userName"]
            clouds[0]["args"]["passwd"] = creds["password"]
            cfg["clouds"] = clouds
            cfg = checkParameterConfig(cfg)
            with open(cfgpath, "w", encoding='utf-8') as n:
                json.dump(cfg, n, indent=1, ensure_ascii=False)
            reboot()
    else:
        # The cloud connection is filled out: if its values differ from the stored
        # ones, assume new credentials were entered manually and store them; if the
        # stored file is empty, seed it from the cloud connection's values.
        checkFileExist("creds.json")
        with open(credspath, "r") as c:
            logger.info("updating stored file with new data")
            cfg = checkParameterConfig(cfg)
            with open(cfgpath, "w", encoding='utf-8') as n:
                json.dump(cfg, n, indent=1, ensure_ascii=False)
            creds = json.load(c)
        if creds:
            if creds["clientId"] != clouds[0]["args"]["clientId"]:
                creds["clientId"] = clouds[0]["args"]["clientId"]
            if creds["userName"] != clouds[0]["args"]["username"]:
                creds["userName"] = clouds[0]["args"]["username"]
            if creds["password"] != clouds[0]["args"]["passwd"]:
                creds["password"] = clouds[0]["args"]["passwd"]
        else:
            creds["clientId"] = clouds[0]["args"]["clientId"]
            creds["userName"] = clouds[0]["args"]["username"]
            creds["password"] = clouds[0]["args"]["passwd"]
        with open(credspath, "w") as cw:
            json.dump(creds, cw)


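# Illustrative sketch (an assumption inferred from the key names used above, not a
# documented format): creds.json is expected to look like
#   {"clientId": "...", "userName": "...", "password": "..."}
# while the cloud connection in device_supervisor.cfg keeps the same values under
# clouds[0]["args"] as "clientId", "username" and "passwd".

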
def checkParameterConfig(cfg):
    logger.info("Checking Parameters!!!!")
    paramspath = "/var/user/files/params.json"
    cfgparams = convertDStoJSON(cfg.get("labels"))
    # Check the stored values.
    checkFileExist("params.json")
    with open(paramspath, "r") as f:
        logger.info("Opened param storage file")
        params = json.load(f)
    if params:
        if cfgparams != params:
            # Go through each param:
            # - if the cfg value is not "unknown" and differs from the stored one,
            #   take the cfg value (it was likely updated manually);
            # - if a key exists in cfg but not in the stored params, copy it over.
            logger.info("equalizing params between cfg and stored")
            for key in cfgparams.keys():
                try:
                    if cfgparams[key] != params[key] and cfgparams[key] != "unknown":
                        params[key] = cfgparams[key]
                except KeyError:
                    params[key] = cfgparams[key]
            cfg["labels"] = convertJSONtoDS(params)
            _set_global_args(convertJSONtoDS(params))
            with open(paramspath, "w") as p:
                json.dump(params, p)
    else:
        with open(paramspath, "w") as p:
            logger.info("initializing param file with params in memory")
            json.dump(convertDStoJSON(get_params()), p)
        cfg["labels"] = get_params()

    return cfg


# Helper function to split the payload into chunks
def chunk_payload(payload, chunk_size=20):
    chunked_values = list(payload["values"].items())
    for i in range(0, len(chunked_values), chunk_size):
        yield {
            "ts": payload["ts"],
            "values": dict(chunked_values[i:i + chunk_size])
        }


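# Illustrative sketch (not part of the original script; safe to delete): shows how
# chunk_payload splits a large "values" dict into fixed-size pieces that all share
# the same timestamp. Nothing below calls this helper.
def _demo_chunk_payload():
    demo = {"ts": 1700000000000, "values": {f"tag{i}": i for i in range(45)}}
    chunks = list(chunk_payload(demo, chunk_size=20))
    # 45 values with chunk_size=20 -> three chunks of 20, 20 and 5 values,
    # each carrying ts=1700000000000.
    return chunks

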
def sendData(message):
    # Timestamps are rounded to the nearest 10 minutes (600 s) and expressed in ms.
    payload = {"ts": (round(dt.timestamp(dt.now()) / 600) * 600) * 1000, "values": {}}
    resetPayload = {"ts": "", "values": {}}
    try:
        checkCredentialConfig()
    except Exception as e:
        logger.error(e)
    logger.info(message)
    dayReset = False
    monthReset = False
    for measure in message["values"]["hrvalvecontroller"].keys():
        try:
            logger.debug(measure)
            if measure in ["totalizer_lifetime"]:
                payload["values"]["today_total"], dayReset = totalizeDay(message["values"]["hrvalvecontroller"][measure]["raw_data"])
                payload["values"]["month_total"], monthReset = totalizeMonth(message["values"]["hrvalvecontroller"][measure]["raw_data"])

            payload["values"][measure] = message["values"]["hrvalvecontroller"][measure]["raw_data"]
        except Exception as e:
            logger.error(e)
    for chunk in chunk_payload(payload=payload):
        publish(__topic__, json.dumps(chunk), __qos__)
        time.sleep(2)
    publish("v1/devices/me/attributes", json.dumps({"latestReportTime": (round(dt.timestamp(dt.now()) / 600) * 600) * 1000}), __qos__, cloud_name="default")

    # On a day/month rollover, report the closing volume and zero the running one.
    if dayReset:
        resetPayload["values"]["yesterday_volume"] = payload["values"]["today_volume"]
        resetPayload["values"]["today_volume"] = 0
    if monthReset:
        resetPayload["values"]["last_month_volume"] = payload["values"]["month_volume"]
        resetPayload["values"]["month_volume"] = 0

    if resetPayload["values"]:
        # The reset record is stamped one second after the rounded report time.
        resetPayload["ts"] = 1000 + (round(dt.timestamp(dt.now()) / 600) * 600) * 1000
        publish(__topic__, json.dumps(resetPayload), __qos__, cloud_name="default")


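# Illustrative sketch (an assumption inferred from the indexing in sendData above,
# not a documented format): `message` is expected to look roughly like
#   {
#       "values": {
#           "hrvalvecontroller": {
#               "totalizer_lifetime": {"raw_data": 12345.6},
#               "today_volume": {"raw_data": 78.9},
#               ...
#           }
#       }
#   }
# where "hrvalvecontroller" is the controller name and each measure carries its
# value under "raw_data".

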
def get_totalizers():
    # Default totalizer state used when the file is missing, unreadable or empty.
    defaults = {
        "dayDate": "2022-01-01",
        "week": 0,
        "monthDate": "2022-01-01",
        "year": 0,
        "lifetime": 0,
        "dayHolding": 0,
        "weekHolding": 0,
        "monthHolding": 0,
        "yearHolding": 0
    }
    try:
        with open("/var/user/files/totalizers.json", "r") as t:
            totalizers = json.load(t)
        if not totalizers:
            logger.debug("-----INITIALIZING TOTALIZERS-----")
            totalizers = defaults
    except Exception:
        totalizers = defaults
    return totalizers


def saveTotalizers(totalizers):
    try:
        with open("/var/user/files/totalizers.json", "w") as t:
            json.dump(totalizers, t)
    except Exception as e:
        logger.error(e)


def totalizeDay(lifetime):
    totalizers = get_totalizers()
    # Round the current time to the nearest 10 minutes, then shift it back 8 hours.
    now = dt.fromtimestamp(round(dt.timestamp(dt.now()) / 600) * 600)
    now = now - td(seconds=60 * 60 * 8)  # time shifted back 8 hours
    reset = False
    value = lifetime - totalizers["dayHolding"]
    if not now.date() == dt.strptime(totalizers["dayDate"], "%Y-%m-%d").date():
        # New day: snapshot the lifetime total as the new baseline and flag a reset.
        totalizers["dayHolding"] = lifetime
        totalizers["dayDate"] = str(now.date())
        saveTotalizers(totalizers)
        reset = True
    return (value, reset)


def totalizeMonth(lifetime):
    totalizers = get_totalizers()
    # Round the current time to the nearest 10 minutes, shift it back 8 hours,
    # then truncate it to the first day of the month.
    now = dt.fromtimestamp(round(dt.timestamp(dt.now()) / 600) * 600)
    now = now - td(seconds=60 * 60 * 8)  # time shifted back 8 hours
    now = dt.strptime(f"{now.year}-{now.month}", "%Y-%m")
    reset = False
    value = lifetime - totalizers["monthHolding"]
    if not now.date() == dt.strptime(totalizers["monthDate"], "%Y-%m-%d").date():
        # New month: snapshot the lifetime total as the new baseline and flag a reset.
        totalizers["monthHolding"] = lifetime
        totalizers["monthDate"] = str(now.date())
        saveTotalizers(totalizers)
        reset = True
    return (value, reset)
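

# Illustrative example (not part of the original script) of the rollover arithmetic:
# with totalizers = {"dayHolding": 900.0, "dayDate": "2024-05-01", ...} and a
# lifetime reading of 1000.0, totalizeDay returns (100.0, False) while the shifted
# date is still 2024-05-01. On the first call after the date changes it still
# returns the closing total (value is computed before the baseline moves) together
# with reset=True, while dayHolding is snapped to the current lifetime reading and
# the new date is stored, so sendData can publish the zeroing resetPayload.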