updates 2024-07-31

This commit is contained in:
Nico Melone
2024-07-31 13:56:21 -05:00
parent 5af6c48ade
commit 79b2f149df
62 changed files with 443262 additions and 993 deletions

View File

@@ -0,0 +1,190 @@
# Enter your python code.
import json, os, time
from datetime import datetime as dt
from common.Logger import logger
from quickfaas.remotebus import publish
from quickfaas.global_dict import get as get_params
from quickfaas.global_dict import _set_global_args
from mobiuspi_lib.gps import GPS
def reboot():
    """Restart the device-supervisor python runtime.

    Sends SIGHUP to the supervisord process so the platform reloads this
    app with the (possibly rewritten) configuration files.
    """
    logger.info("!" * 10 + "REBOOTING DEVICE" + "!" * 10)
    # Shell backticks resolve the supervisord pid at run time; the command
    # output is captured purely for logging.
    result = os.popen("kill -s SIGHUP `cat /var/run/python/supervisord.pid`").read()
    logger.info(f"REBOOT : {result}")
def checkFileExist(filename):
    """Ensure /var/user/files/<filename> exists.

    Creates the /var/user/files directory on first use and initializes a
    missing file with an empty JSON object ({}).
    """
    path = "/var/user/files"
    if not os.path.exists(path):
        logger.info("no folder making files folder in var/user")
        os.makedirs(path)
    filepath = os.path.join(path, filename)
    # Single creation point for the file (the original duplicated this in
    # both branches); "w" is safe because we only write when it is missing.
    if not os.path.exists(filepath):
        logger.info("no creds file making creds file")
        with open(filepath, "w") as f:
            json.dump({}, f)
def convertDStoJSON(ds):
    """Flatten a device-supervisor key/value record list into a plain dict."""
    return {entry["key"]: entry["value"] for entry in ds}
def convertJSONtoDS(j):
    """Expand a plain dict into the device-supervisor key/value record list."""
    return [{"key": k, "value": v} for k, v in j.items()]
def checkCredentialConfig():
    """Keep MQTT credentials in sync between the live supervisor config and
    the persisted creds.json file.

    If the config still holds placeholder values ("unknown" / empty passwd)
    it is filled from creds.json, the parameters are reconciled, the config
    file is rewritten, and the device reboots to apply it. Otherwise
    creds.json is refreshed from whatever is currently in the config.
    """
    logger.info("CHECKING CONFIG")
    # Live device-supervisor configuration (rewritten in place below).
    cfgpath = "/var/user/cfg/device_supervisor/device_supervisor.cfg"
    # Persisted credential backup that survives config resets.
    credspath = "/var/user/files/creds.json"
    cfg = dict()
    with open(cfgpath, "r") as f:
        cfg = json.load(f)
    clouds = cfg.get("clouds")
    logger.info(clouds)
    # if not configured then try to configure from stored values
    if clouds[0]["args"]["clientId"] == "unknown" or clouds[0]["args"]["username"] == "unknown" or not clouds[0]["args"]["passwd"] or clouds[0]["args"]["passwd"] == "unknown":
        # Make sure creds.json exists (created as {} when missing).
        checkFileExist("creds.json")
        with open(credspath, "r") as c:
            creds = json.load(c)
        if creds:
            logger.info("updating config with stored data")
            # NOTE(review): the stored file uses "userName"/"password" keys
            # while the cfg uses "username"/"passwd".
            clouds[0]["args"]["clientId"] = creds["clientId"]
            clouds[0]["args"]["username"] = creds["userName"]
            clouds[0]["args"]["passwd"] = creds["password"]
            cfg["clouds"] = clouds
            cfg = checkParameterConfig(cfg)
            with open(cfgpath, "w", encoding='utf-8') as n:
                json.dump(cfg, n, indent=1, ensure_ascii=False)
            # Restart so the supervisor picks up the rewritten config.
            reboot()
    else:
        # assuming clouds is filled out, if data is different then assume someone typed in something new and store it, if creds is empty fill with clouds' data
        checkFileExist("creds.json")
        with open(credspath, "r") as c:
            logger.info("updating stored file with new data")
            cfg = checkParameterConfig(cfg)
            with open(cfgpath, "w", encoding='utf-8') as n:
                json.dump(cfg, n, indent=1, ensure_ascii=False)
            # Read the stored creds only after the cfg rewrite, using the
            # handle opened before it.
            creds = json.load(c)
        if creds:
            # Copy over only the fields that actually changed.
            if creds["clientId"] != clouds[0]["args"]["clientId"]:
                creds["clientId"] = clouds[0]["args"]["clientId"]
            if creds["userName"] != clouds[0]["args"]["username"]:
                creds["userName"] = clouds[0]["args"]["username"]
            if creds["password"] != clouds[0]["args"]["passwd"]:
                creds["password"] = clouds[0]["args"]["passwd"]
        else:
            # creds.json was empty ({}); seed it from the current config.
            creds["clientId"] = clouds[0]["args"]["clientId"]
            creds["userName"] = clouds[0]["args"]["username"]
            creds["password"] = clouds[0]["args"]["passwd"]
        with open(credspath, "w") as cw:
            json.dump(creds,cw)
def checkParameterConfig(cfg):
    """Reconcile custom parameters ("labels") between the supervisor cfg
    and the persisted params.json file.

    Returns the cfg dict, with its "labels" entry replaced by the merged
    view when the stored and in-config parameters differ.
    """
    logger.info("Checking Parameters!!!!")
    paramspath = "/var/user/files/params.json"
    cfgparams = convertDStoJSON(cfg.get("labels"))
    # Make sure params.json exists (created as {} when missing).
    checkFileExist("params.json")
    with open(paramspath, "r") as f:
        logger.info("Opened param storage file")
        params = json.load(f)
    if params:
        if cfgparams != params:
            # For each cfg param:
            # - prefer the cfg value when it differs and is not "unknown"
            #   (likely updated manually)
            # - copy keys present in cfg but missing from storage
            logger.info("equalizing params between cfg and stored")
            for key in cfgparams.keys():
                try:
                    if cfgparams[key] != params[key] and cfgparams[key] != "unknown":
                        params[key] = cfgparams[key]
                except KeyError:
                    # Key not yet stored; take the cfg value regardless.
                    # (Was a bare except, which also hid unrelated errors.)
                    params[key] = cfgparams[key]
            cfg["labels"] = convertJSONtoDS(params)
            _set_global_args(convertJSONtoDS(params))
            with open(paramspath, "w") as p:
                json.dump(params, p)
    else:
        # First run: seed the storage file from the params in memory.
        with open(paramspath, "w") as p:
            logger.info("initializing param file with params in memory")
            json.dump(convertDStoJSON(get_params()), p)
        cfg["labels"] = get_params()
    return cfg
def getGPS():
    """Return the current GPS fix as strings: (latitude, longitude, speed).

    Coordinates are converted from the degrees + decimal-minutes text the
    modem reports into signed decimal degrees; speed is converted from
    knots to MPH.
    """
    fix = GPS().get_position_status()
    logger.debug("position_status: ")
    logger.debug(fix)

    def to_decimal(raw, negative_hemisphere):
        # parts: [degrees+unit, minutes+unit, hemisphere] — the trailing
        # unit character of the first two fields is stripped with [:-1].
        parts = raw.split(" ")
        decimal = int(parts[0][:-1]) + float(parts[1][:-1]) / 60
        return -decimal if parts[2] == negative_hemisphere else decimal

    lat = to_decimal(fix["latitude"], "S")
    lon = to_decimal(fix["longitude"], "W")
    logger.info("HERE IS THE GPS COORDS")
    logger.info(f"LATITUDE: {lat}, LONGITUDE: {lon}")
    # Speed field starts with a knots figure; 1 knot = 1.151 mph.
    mph = float(fix["speed"].split(" ")[0]) * 1.151
    return (f"{lat:.8f}", f"{lon:.8f}", f"{mph:.2f}")
def chunk_payload(payload, chunk_size=20):
    """Yield the payload split into pieces of at most chunk_size values.

    Supports two shapes:
      * telemetry: {"ts": ..., "values": {...}} — each chunk keeps "ts"
      * flat attribute dict — each chunk is a plain sub-dict
    """
    if "values" in payload:
        pending = list(payload["values"].items())
        while pending:
            batch, pending = pending[:chunk_size], pending[chunk_size:]
            yield {"ts": payload["ts"], "values": dict(batch)}
    else:
        remaining = list(payload)
        while remaining:
            batch, remaining = remaining[:chunk_size], remaining[chunk_size:]
            yield {k: payload[k] for k in batch}
def sendData(message):
    """Quickfaas measure callback: publish healthy measure values as
    chunked telemetry, mirror setpoints to the attributes topic, and
    attach best-effort GPS coordinates.

    message: dict with a "measures" list of {"name", "value", "health"}.
    """
    try:
        checkCredentialConfig()
    except Exception as e:
        logger.error(e)
    # Timestamp rounded to the nearest 10 minutes (600 s), in milliseconds.
    payload = {"ts": (round(dt.timestamp(dt.now()) / 600) * 600) * 1000, "values": {}}
    attributes_payload = {}
    for measure in message["measures"]:
        try:
            logger.debug(measure)
            if measure["health"] == 1:
                # Setpoint tags (name contains "_spt") are mirrored to the
                # attributes topic as well as regular telemetry.
                if "_spt" in measure["name"]:
                    attributes_payload[measure["name"]] = measure["value"]
                payload["values"][measure["name"]] = measure["value"]
        except Exception as e:
            logger.error(e)
    try:
        payload["values"]["latitude"], payload["values"]["longitude"], payload["values"]["speed"] = getGPS()
    except Exception as e:
        # GPS is best-effort; telemetry still goes out without coordinates.
        # (Was a bare except that also hid the actual failure reason.)
        logger.error(f"Could not get GPS coordinates: {e}")
    for chunk in chunk_payload(payload=payload):
        publish(__topic__, json.dumps(chunk), __qos__)
        time.sleep(2)
    attributes_payload["latestReportTime"] = (round(dt.timestamp(dt.now()) / 600) * 600) * 1000
    for chunk in chunk_payload(payload=attributes_payload):
        publish("v1/devices/me/attributes", json.dumps(chunk), __qos__)
        time.sleep(2)

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,10 @@
MeasuringPointName,ControllerName,GroupName,UploadType,DataType,EnableBit,BitIndex,reverseBit,Address,Decimal,Len,ReadWrite,Unit,Description,Transform Type,MaxValue,MinValue,MaxScale,MinScale,Gain,Offset,startBit,endBit,Pt,Ct,Mapping_table,TransDecimal,bitMap,msecSample,DataEndianReverse,ReadOffset,ReadLength,DataParseMethod,BitId,storageLwTSDB
water_tank_01_level,sp_transfer,default,periodic,FLOAT,,,,Water_Tank_1_Level,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,0
hand_input,sp_transfer,default,periodic,BIT,,,0,Raw_Hand_Input,,,ro,,,none,,,,,,,,,,,,,0,,,,,,,0
local_start_input,sp_transfer,default,periodic,BIT,,,0,Raw_Local_Start,,,ro,,,none,,,,,,,,,,,,,0,,,,,,,0
auto_input,sp_transfer,default,periodic,BIT,,,0,Raw_Auto_Input,,,ro,,,none,,,,,,,,,,,,,0,,,,,,,0
cloud_control_cmd,sp_transfer,default,periodic,BIT,,,0,CMD_Cloud_Control,,,rw,,,none,,,,,,,,,,,,,0,,,,,,,0
start_permissive_spt,sp_transfer,default,periodic,FLOAT,,,,WTP_Start_Setpoint,2,,rw,,,none,,,,,,,,,,,,,,,,,,,,0
stop_permissive_spt,sp_transfer,default,periodic,FLOAT,,,,WTP_Stop_Setpoint,2,,rw,,,none,,,,,,,,,,,,,,,,,,,,0
water_tank_01_hi_alm,sp_transfer,default,periodic,BIT,,,0,WT1_Hi_AL0,,,ro,,,none,,,,,,,,,,,,,0,,,,,,,0
water_tank_01_hihi_alm,sp_transfer,default,periodic,BIT,,,0,WT1_HiHi_AL0,,,ro,,,none,,,,,,,,,,,,,0,,,,,,,0
1 MeasuringPointName ControllerName GroupName UploadType DataType EnableBit BitIndex reverseBit Address Decimal Len ReadWrite Unit Description Transform Type MaxValue MinValue MaxScale MinScale Gain Offset startBit endBit Pt Ct Mapping_table TransDecimal bitMap msecSample DataEndianReverse ReadOffset ReadLength DataParseMethod BitId storageLwTSDB
2 water_tank_01_level sp_transfer default periodic FLOAT Water_Tank_1_Level 2 ro none 0
3 hand_input sp_transfer default periodic BIT 0 Raw_Hand_Input ro none 0 0
4 local_start_input sp_transfer default periodic BIT 0 Raw_Local_Start ro none 0 0
5 auto_input sp_transfer default periodic BIT 0 Raw_Auto_Input ro none 0 0
6 cloud_control_cmd sp_transfer default periodic BIT 0 CMD_Cloud_Control rw none 0 0
7 start_permissive_spt sp_transfer default periodic FLOAT WTP_Start_Setpoint 2 rw none 0
8 stop_permissive_spt sp_transfer default periodic FLOAT WTP_Stop_Setpoint 2 rw none 0
9 water_tank_01_hi_alm sp_transfer default periodic BIT 0 WT1_Hi_AL0 ro none 0 0
10 water_tank_01_hihi_alm sp_transfer default periodic BIT 0 WT1_HiHi_AL0 ro none 0 0

View File

@@ -0,0 +1,64 @@
import json, time
from datetime import datetime as dt
from quickfaas.measure import recall, write
from quickfaas.remotebus import publish
from common.Logger import logger
# Helper function to split the payload into chunks
def chunk_payload(payload, chunk_size=20):
    """Split a telemetry payload's "values" dict into chunks of at most
    chunk_size entries, each chunk carrying the original "ts"."""
    pending = list(payload["values"].items())
    while pending:
        batch, pending = pending[:chunk_size], pending[chunk_size:]
        yield {"ts": payload["ts"], "values": dict(batch)}
def sync():
    """Read every measure via recall() and publish them as chunked
    telemetry on the ThingsBoard telemetry topic."""
    payload = {"ts": round(dt.timestamp(dt.now())) * 1000, "values": {}}
    topic = "v1/devices/me/telemetry"
    try:
        data = recall()
    except Exception as e:
        # Without measure data there is nothing to publish. (The original
        # fell through and hit a NameError on the unbound `data`.)
        logger.error(e)
        return
    logger.debug(data)
    for controller in data:
        for measure in controller["measures"]:
            payload["values"][measure["name"]] = measure["value"]
    logger.debug("Sending on topic: {}".format(topic))
    logger.debug("Sending value: {}".format(payload))
    for chunk in chunk_payload(payload=payload):
        publish(topic, json.dumps(chunk), 1)
        time.sleep(2)
def writeplctag(value):
    """Write a single PLC tag through the device-supervisor write() API.

    value: {"measurement": <tag name>, "value": <value to write>}
    Returns True on success, False when the write (or payload access) fails.
    """
    try:
        logger.debug(value)
        # write() payload format:
        # [{"name": <controller>, "measures": [{"name": ..., "value": ...}]}]
        message = [{"name": "sp_transfer", "measures":[{"name":value["measurement"], "value": value["value"]}]}]
        resp = write(message)
        logger.debug("RETURN FROM WRITE: {}".format(resp))
        return True
    except Exception as e:
        # Log at error level (was debug, so failures were easy to miss);
        # the caller sees the failure via the False return value.
        logger.error(e)
        return False
def receiveAttributes(topic, payload):
    """MQTT callback for ThingsBoard shared-attribute updates.

    Writes each attribute key/value pair to the PLC, then republishes the
    fresh values via sync().
    """
    try:
        logger.debug(topic)
        # Parse once (the original parsed the payload twice).
        attrs = json.loads(payload)
        logger.debug(attrs)
        for key, value in attrs.items():
            try:
                result = writeplctag({"measurement":key, "value":value})
                logger.debug(result)
            except Exception as e:
                logger.error(e)
        sync()
    except Exception as e:
        # Bad JSON or a sync failure ends the callback; log it visibly
        # (was debug level).
        logger.error(e)

View File

@@ -0,0 +1,77 @@
import json, time
from datetime import datetime as dt
from quickfaas.measure import recall, write
from quickfaas.remotebus import publish
from common.Logger import logger
# Helper function to split the payload into chunks
def chunk_payload(payload, chunk_size=20):
    """Break a telemetry payload into pieces of at most chunk_size values,
    repeating the payload's "ts" on every piece."""
    entries = list(payload["values"].items())
    for start in range(0, len(entries), chunk_size):
        piece = dict(entries[start:start + chunk_size])
        yield {"ts": payload["ts"], "values": piece}
def sync():
    """Read every measure via recall() and publish them as chunked
    telemetry on the ThingsBoard telemetry topic."""
    payload = {"ts": round(dt.timestamp(dt.now())) * 1000, "values": {}}
    topic = "v1/devices/me/telemetry"
    try:
        data = recall()
    except Exception as e:
        # Without measure data there is nothing to publish. (The original
        # fell through and hit a NameError on the unbound `data`.)
        logger.error(e)
        return
    logger.debug(data)
    for controller in data:
        for measure in controller["measures"]:
            payload["values"][measure["name"]] = measure["value"]
    logger.debug("Sending on topic: {}".format(topic))
    logger.debug("Sending value: {}".format(payload))
    for chunk in chunk_payload(payload=payload):
        publish(topic, json.dumps(chunk), 1)
        time.sleep(2)
def writeplctag(value):
    """Write a single PLC tag through the device-supervisor write() API.

    value: {"measurement": <tag name>, "value": <value to write>}
    Returns True on success, False when the write (or payload access) fails.
    """
    try:
        logger.debug(value)
        # write() payload format:
        # [{"name": <controller>, "measures": [{"name": ..., "value": ...}]}]
        message = [{"name": "sp_transfer", "measures":[{"name":value["measurement"], "value": value["value"]}]}]
        resp = write(message)
        logger.debug("RETURN FROM WRITE: {}".format(resp))
        return True
    except Exception as e:
        # Log at error level (was debug, so failures were easy to miss);
        # the caller sees the failure via the False return value.
        logger.error(e)
        return False
def receiveCommand(topic, payload):
    """MQTT callback for ThingsBoard server-side RPC requests.

    Handles the "setPLCTag" method by writing the requested tag; in all
    cases the request is acked and current values are republished via
    sync() after a short delay.
    """
    try:
        logger.debug(topic)
        # Parse once (the original parsed the payload twice).
        request = json.loads(payload)
        logger.debug(request)
        # The original routed through a `commands` dict that only ever
        # dispatched this one hard-coded entry; call it directly instead.
        if request["method"] == "setPLCTag":
            try:
                result = writeplctag(request["params"])
                logger.debug(result)
            except Exception as e:
                logger.error(e)
        # The request id is the final topic segment
        # (v1/devices/me/rpc/request/<id>); ack even for unknown methods.
        ack(topic.split("/")[-1])
        time.sleep(5)
        sync()
    except Exception as e:
        logger.error(e)
def ack(msgid):
    """Acknowledge an RPC request by publishing a response envelope on
    v1/devices/me/rpc/response/<msgid> at QoS 1."""
    response = {"msg": {"time": time.time()}, "metadata": "", "msgType": ""}
    publish("v1/devices/me/rpc/response/" + str(msgid), json.dumps(response), 1)