updated with config saving
This commit is contained in:
BIN
Device_Supervisor-V2.2.1.tar.gz
Normal file
BIN
Device_Supervisor-V2.2.1.tar.gz
Normal file
Binary file not shown.
BIN
IG502-V2.0.0.r14208.bin
Normal file
BIN
IG502-V2.0.0.r14208.bin
Normal file
Binary file not shown.
BIN
IG502-V2.0.0.r14218.bin
Normal file
BIN
IG502-V2.0.0.r14218.bin
Normal file
Binary file not shown.
BIN
Pub_Sub/.DS_Store
vendored
BIN
Pub_Sub/.DS_Store
vendored
Binary file not shown.
BIN
Pub_Sub/__pycache__/convert_config.cpython-310.pyc
Normal file
BIN
Pub_Sub/__pycache__/convert_config.cpython-310.pyc
Normal file
Binary file not shown.
BIN
Pub_Sub/advvfdipp/.DS_Store
vendored
BIN
Pub_Sub/advvfdipp/.DS_Store
vendored
Binary file not shown.
82565
Pub_Sub/advvfdipp/advvfdipp_tags.json
Normal file
82565
Pub_Sub/advvfdipp/advvfdipp_tags.json
Normal file
File diff suppressed because it is too large
Load Diff
1097
Pub_Sub/advvfdipp/thingsboard/v2/advvfdipp_tb_v2.cfg
Normal file
1097
Pub_Sub/advvfdipp/thingsboard/v2/advvfdipp_tb_v2.cfg
Normal file
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
309
Pub_Sub/advvfdipp/thingsboard/v3/pub/sendData.py
Normal file
309
Pub_Sub/advvfdipp/thingsboard/v3/pub/sendData.py
Normal file
@@ -0,0 +1,309 @@
|
|||||||
|
# Enter your python code.
|
||||||
|
import json, os
|
||||||
|
from datetime import datetime as dt
|
||||||
|
from common.Logger import logger
|
||||||
|
from quickfaas.remotebus import publish
|
||||||
|
from quickfaas.global_dict import get as get_params
|
||||||
|
from quickfaas.global_dict import _set_global_args
|
||||||
|
|
||||||
|
def reboot():
    """Restart the device-supervisor process so config changes take effect.

    Sends SIGHUP to the PID recorded in /var/run/python/supervisord.pid;
    the supervisor reloads with the freshly written configuration.
    """
    logger.info("!" * 10 + "REBOOTING DEVICE" + "!" * 10)
    result = os.popen("kill -s SIGHUP `cat /var/run/python/supervisord.pid`").read()
    logger.info(f"REBOOT : {result}")
|
||||||
|
|
||||||
|
def checkFileExist(filename):
    """Ensure /var/user/files/<filename> exists and holds valid JSON.

    Creates the directory and/or seeds the file with an empty JSON object
    when missing; existing files are left untouched.

    Fixes vs. original: the file is written with mode "w" instead of "a"
    (append mode could stack multiple `{}` objects, yielding invalid JSON),
    and the log message no longer claims "creds" for arbitrary filenames.
    """
    path = "/var/user/files"
    filepath = path + "/" + filename
    if not os.path.exists(path):
        logger.info("no folder making files folder in var/user")
        os.makedirs(path)
    if not os.path.exists(filepath):
        logger.info(f"no {filename} file, creating it")
        with open(filepath, "w") as f:
            json.dump({}, f)
|
||||||
|
|
||||||
|
def convertDStoJSON(ds):
    """Flatten a [{"key": k, "value": v}, ...] list into a plain {k: v} dict."""
    return {entry["key"]: entry["value"] for entry in ds}
|
||||||
|
|
||||||
|
def convertJSONtoDS(j):
    """Expand a plain {k: v} dict into a [{"key": k, "value": v}, ...] list."""
    return [{"key": key, "value": val} for key, val in j.items()]
|
||||||
|
|
||||||
|
def checkCredentialConfig():
    """Synchronise MQTT credentials between the supervisor config and creds.json.

    If the active config still holds placeholder credentials ("unknown" or an
    empty password), restore them from the stored creds.json, persist the
    updated config, and reboot so they take effect. Otherwise the config is
    treated as authoritative: parameters are reconciled, the config is
    rewritten, and its credentials are mirrored into creds.json.

    Fixes vs. original: the compare-then-assign credential sync (which wrote
    the same values either way) is replaced by direct assignment, and the
    creds.json read handle is no longer held open across the config rewrite.
    """
    logger.info("CHECKING CONFIG")
    cfgpath = "/var/user/cfg/device_supervisor/device_supervisor.cfg"
    credspath = "/var/user/files/creds.json"
    with open(cfgpath, "r") as f:
        cfg = json.load(f)
    clouds = cfg.get("clouds")
    logger.info(clouds)
    args = clouds[0]["args"]
    # "unconfigured" means any credential is still the factory placeholder.
    unconfigured = (
        args["clientId"] == "unknown"
        or args["username"] == "unknown"
        or not args["passwd"]
        or args["passwd"] == "unknown"
    )
    checkFileExist("creds.json")
    if unconfigured:
        # Try to restore the placeholders from previously stored credentials.
        with open(credspath, "r") as c:
            creds = json.load(c)
        if creds:
            logger.info("updating config with stored data")
            args["clientId"] = creds["clientId"]
            args["username"] = creds["userName"]
            args["passwd"] = creds["password"]
            cfg["clouds"] = clouds
            cfg = checkParameterConfig(cfg)
            with open(cfgpath, "w", encoding='utf-8') as n:
                json.dump(cfg, n, indent=1, ensure_ascii=False)
            # Reboot so the supervisor picks up the restored credentials.
            reboot()
    else:
        # Config is filled out: treat it as the source of truth and mirror
        # its credentials into the stored file for future restores.
        logger.info("updating stored file with new data")
        cfg = checkParameterConfig(cfg)
        with open(cfgpath, "w", encoding='utf-8') as n:
            json.dump(cfg, n, indent=1, ensure_ascii=False)
        with open(credspath, "r") as c:
            creds = json.load(c)
        creds["clientId"] = args["clientId"]
        creds["userName"] = args["username"]
        creds["password"] = args["passwd"]
        with open(credspath, "w") as cw:
            json.dump(creds, cw)
|
||||||
|
|
||||||
|
def checkParameterConfig(cfg):
    """Reconcile the config's "labels" parameters with the params.json store.

    Args:
        cfg: parsed device_supervisor config dict (must contain "labels").

    Returns:
        The cfg dict with "labels" replaced by the merged parameter set.

    Fix vs. original: the bare ``except:`` around the per-key merge is
    narrowed to ``except KeyError`` — the only expected failure is a key
    present in the config but missing from the store.
    """
    logger.info("Checking Parameters!!!!")
    paramspath = "/var/user/files/params.json"
    cfgparams = convertDStoJSON(cfg.get("labels"))
    # Make sure the stored-parameter file exists before reading it.
    checkFileExist("params.json")
    with open(paramspath, "r") as f:
        logger.info("Opened param storage file")
        params = json.load(f)
    if params:
        if cfgparams != params:
            # Config values win unless they are the "unknown" placeholder;
            # keys present only in the config are copied into the store.
            logger.info("equalizing params between cfg and stored")
            for key in cfgparams.keys():
                try:
                    if cfgparams[key] != params[key] and cfgparams[key] != "unknown":
                        params[key] = cfgparams[key]
                except KeyError:
                    # Key missing from the store entirely: take it from cfg.
                    params[key] = cfgparams[key]
            cfg["labels"] = convertJSONtoDS(params)
            _set_global_args(convertJSONtoDS(params))
            with open(paramspath, "w") as p:
                json.dump(params, p)
    else:
        # Empty store: seed it from the parameters currently in memory.
        with open(paramspath, "w") as p:
            logger.info("initializing param file with params in memory")
            json.dump(convertDStoJSON(get_params()), p)
        cfg["labels"] = get_params()
    return cfg
|
||||||
|
|
||||||
|
def sendData(message):
    """Publish a measure payload to ThingsBoard telemetry.

    Timestamps are bucketed to 10-minute (600 s) boundaries. Enumerated tags
    are published both as a decoded string and as ``<name>_int``.

    Fixes vs. original: the timestamp is computed once and reused (the two
    publishes could previously land in different 600 s buckets), and the
    string-tag membership list is a frozenset built once instead of a
    15-element list rebuilt and scanned per measure.
    """
    try:
        checkCredentialConfig()
    except Exception as e:
        logger.error(e)
    # Tags whose integer values must be decoded to status strings.
    string_tags = frozenset((
        "wellstatus", "pidcontrolmode", "downholesensorstatus",
        "alarmflowrate", "alarmintakepressure", "alarmintaketemperature",
        "alarmtubingpressure", "alarmvfd", "alarmlockout", "alarmfluidlevel",
        "runpermissive", "startpermissive", "last_vfd_fault_code",
        "vfd_fault", "flowmeter_fault",
    ))
    # One bucketed timestamp (ms since epoch) shared by both publishes.
    ts = (round(dt.timestamp(dt.now()) / 600) * 600) * 1000
    payload = {"ts": ts, "values": {}}
    for measure in message["measures"]:
        try:
            logger.debug(measure)
            name = measure["name"]
            if name in string_tags:
                logger.debug("Converting DINT/BOOL to STRING")
                value = convert_int(name, measure["value"])
                logger.debug("Converted {} to {}".format(measure["value"], value))
                payload["values"][name] = value
                payload["values"][name + "_int"] = measure["value"]
            else:
                payload["values"][name] = measure["value"]
        except Exception as e:
            logger.error(e)

    publish(__topic__, json.dumps(payload), __qos__)
    publish("v1/devices/me/attributes", json.dumps({"latestReportTime": ts}), __qos__)
|
||||||
|
|
||||||
|
def convert_int(plc_tag, value):
    """Decode an enumerated PLC tag's integer value into its display string.

    Args:
        plc_tag: PLC tag name, e.g. "wellstatus" or "vfd_fault".
        value: integer code read from the PLC for that tag.

    Returns:
        The decoded status string, "Invalid Code" when the value is not in
        the tag's code table, or "Invalid Tag" for an unknown tag name.

    Fix vs. original: only the requested tag's table is consulted; the
    original eagerly decoded every tag on each call and discarded all but
    one result.
    """
    well_status_codes = {
        0: "Running", 1: "Pumped Off", 2: "Alarmed", 3: "Locked Out", 4: "Stopped",
    }
    pid_control_codes = {0: "Flow", 1: "Fluid Level", 2: "Tubing Pressure", 3: "Manual"}
    downhole_codes = {0: "OK", 1: "Connecting", 2: "Open Circuit", 3: "Shorted", 4: "Cannot Decode"}
    permissive_codes = {
        0: "OK", 1: "Flow", 2: "Intake Pressure", 3: "Intake Temperature",
        4: "Tubing Pressure", 5: "VFD", 6: "Fluid Level", 7: "Min. Downtime",
    }
    alarm_codes = {0: "OK", 1: "Alarm"}
    alarm_vfd_codes = {0: "OK", 1: "Locked Out"}
    vfd_fault_codes = {
        0: "No Fault", 2: "Auxiliary Input", 3: "Power Loss", 4: "UnderVoltage",
        5: "OverVoltage", 7: "Motor Overload", 8: "Heatsink OverTemp",
        9: "Thermister OverTemp", 10: "Dynamic Brake OverTemp",
        12: "Hardware OverCurrent", 13: "Ground Fault", 14: "Ground Warning",
        15: "Load Loss", 17: "Input Phase Loss", 18: "Motor PTC Trip",
        19: "Task Overrun", 20: "Torque Prove Speed Band", 21: "Output Phase Loss",
        24: "Decel Inhibit", 25: "OverSpeed Limit", 26: "Brake Slipped",
        27: "Torque Prove Conflict", 28: "TP Encls Confict", 29: "Analog In Loss",
        33: "Auto Restarts Exhausted", 35: "IPM OverCurrent", 36: "SW OverCurrent",
        38: "Phase U to Ground", 39: "Phase V to Ground", 40: "Phase W to Ground",
        41: "Phase UV Short", 42: "Phase VW Short", 43: "Phase WU Short",
        44: "Phase UNeg to Ground", 45: "Phase VNeg to Ground",
        46: "Phase WNeg to Ground", 48: "System Defaulted", 49: "Drive Powerup",
        51: "Clear Fault Queue", 55: "Control Board Overtemp", 59: "Invalid Code",
        61: "Shear Pin 1", 62: "Shear Pin 2", 64: "Drive Overload",
        66: "OW Torque Level", 67: "Pump Off", 71: "Port 1 Adapter",
        72: "Port 2 Adapter", 73: "Port 3 Adapter", 74: "Port 4 Adapter",
        75: "Port 5 Adapter", 76: "Port 6 Adapter", 77: "IR Volts Range",
        78: "FluxAmps Ref Range", 79: "Excessive Load", 80: "AutoTune Aborted",
        81: "Port 1 DPI Loss", 82: "Port 2 DPI Loss", 83: "Port 3 DPI Loss",
        84: "Port 4 DPI Loss", 85: "Port 5 DPI Loss", 86: "Port 6 DPI Loss",
        87: "IXo Voltage Range", 91: "Primary Velocity Feedback Loss",
        93: "Hardware Enable Check", 94: "Alternate Velocity Feedback Loss",
        95: "Auxiliary Velocity Feedback Loss", 96: "Position Feedback Loss",
        97: "Auto Tach Switch", 100: "Parameter Checksum",
        101: "Power Down NVS Blank", 102: "NVS Not Blank",
        103: "Power Down NVS Incompatible", 104: "Power Board Checksum",
        106: "Incompat MCB-PB", 107: "Replaced MCB-PB",
        108: "Analog Calibration Checksum", 110: "Invalid Power Board Data",
        111: "Power Board Invalid ID", 112: "Power Board App Min Version",
        113: "Tracking DataError", 115: "Power Down Table Full",
        116: "Power Down Entry Too Large", 117: "Power Down Data Checksum",
        118: "Power Board Power Down Checksum", 124: "App ID Changed",
        125: "Using Backup App", 134: "Start on Power Up",
        137: "External Precharge Error", 138: "Precharge Open",
        141: "Autotune Enc Angle", 142: "Autotune Speed Restricted",
        143: "Autotune Current Regulator", 144: "Autotune Inertia",
        145: "Autotune Travel", 13035: "Net IO Timeout", 13037: "Net IO Timeout",
    }
    # Dispatch: each tag name maps to the code table used to decode it.
    tag_tables = {
        "wellstatus": well_status_codes,
        "pidcontrolmode": pid_control_codes,
        "downholesensorstatus": downhole_codes,
        "alarmflowrate": alarm_codes,
        "alarmintakepressure": alarm_codes,
        "alarmintaketemperature": alarm_codes,
        "alarmtubingpressure": alarm_codes,
        "alarmvfd": alarm_codes,
        "alarmlockout": alarm_vfd_codes,
        "alarmfluidlevel": alarm_codes,
        "runpermissive": permissive_codes,
        "startpermissive": permissive_codes,
        "last_vfd_fault_code": vfd_fault_codes,
        "vfd_fault": vfd_fault_codes,
        "flowmeter_fault": alarm_codes,
    }
    table = tag_tables.get(plc_tag)
    if table is None:
        return "Invalid Tag"
    return table.get(value, "Invalid Code")
|
||||||
|
|
||||||
|
|
||||||
266
Pub_Sub/advvfdipp/thingsboard/v3/sub/receiveCommand.py
Normal file
266
Pub_Sub/advvfdipp/thingsboard/v3/sub/receiveCommand.py
Normal file
@@ -0,0 +1,266 @@
|
|||||||
|
import json, time
|
||||||
|
from quickfaas.measure import recall, write
|
||||||
|
from quickfaas.remotebus import publish
|
||||||
|
from common.Logger import logger
|
||||||
|
|
||||||
|
def sync():
    """Read the current values via recall() and publish them as one telemetry message.

    Fix vs. original: if recall() raised, ``data`` was left undefined and the
    function crashed with NameError at ``logger.debug(data)``; it now falls
    back to an empty list so the (empty) payload is still published.
    """
    payload = {}
    topic = "v1/devices/me/telemetry"
    try:
        data = recall()
    except Exception as e:
        logger.error(e)
        data = []  # original left `data` undefined here and crashed below
    logger.debug(data)
    for controller in data:
        for measure in controller["measures"]:
            # Enumerated tags are published both decoded and as <name>_int.
            if measure["name"] in ["wellstatus","pidcontrolmode","downholesensorstatus","alarmflowrate","alarmintakepressure","alarmintaketemperature","alarmtubingpressure","alarmvfd","alarmlockout","alarmfluidlevel","runpermissive","startpermissive","last_vfd_fault_code","vfd_fault", "flowmeter_fault"]:
                payload[measure["name"]] = convert_int(measure["name"], measure["value"])
                payload[measure["name"] + "_int"] = measure["value"]
            else:
                payload[measure["name"]] = measure["value"]
    logger.debug("Sending on topic: {}".format(topic))
    logger.debug("Sending value: {}".format(payload))
    publish(topic, json.dumps(payload), 1)
|
||||||
|
def writeplctag(value):
    """Write a single measurement value to the PLC.

    Args:
        value: {"measurement": <measurement_name>, "value": <value to write>}

    Returns:
        True when the write call completed, False on any error.
    """
    try:
        logger.debug(value)
        # write() expects: [{"name": <controller>, "measures": [{"name", "value"}]}]
        message = [{
            "name": "advvfdipp",
            "measures": [{"name": value["measurement"], "value": value["value"]}],
        }]
        resp = write(message)
        logger.debug("RETURN FROM WRITE: {}".format(resp))
        return True
    except Exception as e:
        logger.debug(e)
        return False
|
||||||
|
|
||||||
|
def receiveCommand(topic, payload):
    """Handle an RPC request from ThingsBoard, acknowledge it, then re-sync.

    Supported methods: "setPLCTag" (direct tag write) and "changeSetpoint"
    (optionally writes the control mode, then the setpoint for the current
    mode). Every request is acked and followed by a full telemetry sync.
    """
    try:
        logger.debug(topic)
        logger.debug(json.loads(payload))
        request = json.loads(payload)
        method = request["method"]
        handlers = {
            "sync": sync,
            "writeplctag": writeplctag,
        }
        if method == "setPLCTag":
            try:
                outcome = handlers["writeplctag"](request["params"])
                logger.debug(outcome)
            except Exception as e:
                logger.error(e)
        elif method == "changeSetpoint":
            # Step 1: switch the PID control mode, if one was supplied.
            try:
                logger.debug("attempting controlpoint write")
                mode_write = {"measurement": "pidcontrolmode", "value": request["params"]["setpointType"]}
                if mode_write["value"]:
                    handlers["writeplctag"](mode_write)
                    time.sleep(2)
            except Exception as e:
                logger.error("DID NOT WRITE CONTROL MODE")
                logger.error(e)
            # Step 2: write the setpoint for whichever mode is now active.
            try:
                logger.debug("attempting setpoint write")
                modes = {0: "flowsetpoint", 1: "fluidlevelsetpoint", 2: "tubingpressuresetpoint", 3: "manualfrequencysetpoint"}
                setpoint_write = {"value": request["params"]["setpointValue"]}
                if setpoint_write["value"]:
                    setpoint_write["measurement"] = modes[getMode()]
                    outcome = handlers["writeplctag"](setpoint_write)
                    logger.debug(outcome)
            except Exception as e:
                logger.error("DID NOT WRITE SETPOINT")
                logger.error(e)
        # Ack with the request id (last segment of the RPC topic), then
        # give the PLC a moment before re-publishing fresh values.
        ack(topic.split("/")[-1])
        time.sleep(5)
        sync()
    except Exception as e:
        logger.debug(e)
|
||||||
|
|
||||||
|
|
||||||
|
def ack(msgid):
    """Acknowledge an RPC request by publishing a response for its message id."""
    response = {"msg": {"time": time.time()}, "metadata": "", "msgType": ""}
    publish("v1/devices/me/rpc/response/" + str(msgid), json.dumps(response), 1)
|
||||||
|
|
||||||
|
def getMode():
    """Return the current "pidcontrolmode" value from recall(), or None.

    Returns None both when the tag is absent and when recall() fails.

    Fix vs. original: the bare ``except:`` is narrowed to ``Exception`` and
    the error is logged instead of being silently swallowed.
    """
    try:
        data = recall()
        for controller in data:
            for measure in controller["measures"]:
                if measure["name"] == "pidcontrolmode":
                    return measure["value"]
    except Exception as e:
        logger.error(e)
        return None
    return None
|
||||||
|
|
||||||
|
def convert_int(plc_tag, value):
    """Decode an enumerated PLC tag's integer value into its display string.

    Args:
        plc_tag: PLC tag name, e.g. "wellstatus" or "vfd_fault".
        value: integer code read from the PLC for that tag.

    Returns:
        The decoded status string, "Invalid Code" when the value is not in
        the tag's code table, or "Invalid Tag" for an unknown tag name.

    Fix vs. original: only the requested tag's table is consulted; the
    original eagerly decoded every tag on each call and discarded all but
    one result.
    """
    well_status_codes = {
        0: "Running", 1: "Pumped Off", 2: "Alarmed", 3: "Locked Out", 4: "Stopped",
    }
    pid_control_codes = {0: "Flow", 1: "Fluid Level", 2: "Tubing Pressure", 3: "Manual"}
    downhole_codes = {0: "OK", 1: "Connecting", 2: "Open Circuit", 3: "Shorted", 4: "Cannot Decode"}
    permissive_codes = {
        0: "OK", 1: "Flow", 2: "Intake Pressure", 3: "Intake Temperature",
        4: "Tubing Pressure", 5: "VFD", 6: "Fluid Level", 7: "Min. Downtime",
    }
    alarm_codes = {0: "OK", 1: "Alarm"}
    alarm_vfd_codes = {0: "OK", 1: "Locked Out"}
    vfd_fault_codes = {
        0: "No Fault", 2: "Auxiliary Input", 3: "Power Loss", 4: "UnderVoltage",
        5: "OverVoltage", 7: "Motor Overload", 8: "Heatsink OverTemp",
        9: "Thermister OverTemp", 10: "Dynamic Brake OverTemp",
        12: "Hardware OverCurrent", 13: "Ground Fault", 14: "Ground Warning",
        15: "Load Loss", 17: "Input Phase Loss", 18: "Motor PTC Trip",
        19: "Task Overrun", 20: "Torque Prove Speed Band", 21: "Output Phase Loss",
        24: "Decel Inhibit", 25: "OverSpeed Limit", 26: "Brake Slipped",
        27: "Torque Prove Conflict", 28: "TP Encls Confict", 29: "Analog In Loss",
        33: "Auto Restarts Exhausted", 35: "IPM OverCurrent", 36: "SW OverCurrent",
        38: "Phase U to Ground", 39: "Phase V to Ground", 40: "Phase W to Ground",
        41: "Phase UV Short", 42: "Phase VW Short", 43: "Phase WU Short",
        44: "Phase UNeg to Ground", 45: "Phase VNeg to Ground",
        46: "Phase WNeg to Ground", 48: "System Defaulted", 49: "Drive Powerup",
        51: "Clear Fault Queue", 55: "Control Board Overtemp", 59: "Invalid Code",
        61: "Shear Pin 1", 62: "Shear Pin 2", 64: "Drive Overload",
        66: "OW Torque Level", 67: "Pump Off", 71: "Port 1 Adapter",
        72: "Port 2 Adapter", 73: "Port 3 Adapter", 74: "Port 4 Adapter",
        75: "Port 5 Adapter", 76: "Port 6 Adapter", 77: "IR Volts Range",
        78: "FluxAmps Ref Range", 79: "Excessive Load", 80: "AutoTune Aborted",
        81: "Port 1 DPI Loss", 82: "Port 2 DPI Loss", 83: "Port 3 DPI Loss",
        84: "Port 4 DPI Loss", 85: "Port 5 DPI Loss", 86: "Port 6 DPI Loss",
        87: "IXo Voltage Range", 91: "Primary Velocity Feedback Loss",
        93: "Hardware Enable Check", 94: "Alternate Velocity Feedback Loss",
        95: "Auxiliary Velocity Feedback Loss", 96: "Position Feedback Loss",
        97: "Auto Tach Switch", 100: "Parameter Checksum",
        101: "Power Down NVS Blank", 102: "NVS Not Blank",
        103: "Power Down NVS Incompatible", 104: "Power Board Checksum",
        106: "Incompat MCB-PB", 107: "Replaced MCB-PB",
        108: "Analog Calibration Checksum", 110: "Invalid Power Board Data",
        111: "Power Board Invalid ID", 112: "Power Board App Min Version",
        113: "Tracking DataError", 115: "Power Down Table Full",
        116: "Power Down Entry Too Large", 117: "Power Down Data Checksum",
        118: "Power Board Power Down Checksum", 124: "App ID Changed",
        125: "Using Backup App", 134: "Start on Power Up",
        137: "External Precharge Error", 138: "Precharge Open",
        141: "Autotune Enc Angle", 142: "Autotune Speed Restricted",
        143: "Autotune Current Regulator", 144: "Autotune Inertia",
        145: "Autotune Travel", 13035: "Net IO Timeout", 13037: "Net IO Timeout",
    }
    # Dispatch: each tag name maps to the code table used to decode it.
    tag_tables = {
        "wellstatus": well_status_codes,
        "pidcontrolmode": pid_control_codes,
        "downholesensorstatus": downhole_codes,
        "alarmflowrate": alarm_codes,
        "alarmintakepressure": alarm_codes,
        "alarmintaketemperature": alarm_codes,
        "alarmtubingpressure": alarm_codes,
        "alarmvfd": alarm_codes,
        "alarmlockout": alarm_vfd_codes,
        "alarmfluidlevel": alarm_codes,
        "runpermissive": permissive_codes,
        "startpermissive": permissive_codes,
        "last_vfd_fault_code": vfd_fault_codes,
        "vfd_fault": vfd_fault_codes,
        "flowmeter_fault": alarm_codes,
    }
    table = tag_tables.get(plc_tag)
    if table is None:
        return "Invalid Tag"
    return table.get(value, "Invalid Code")
|
||||||
114
Pub_Sub/base.py
Normal file
114
Pub_Sub/base.py
Normal file
@@ -0,0 +1,114 @@
|
|||||||
|
import json, os
|
||||||
|
from common.Logger import logger
|
||||||
|
from quickfaas.global_dict import get as get_params
|
||||||
|
from quickfaas.global_dict import _set_global_args
|
||||||
|
|
||||||
|
def reboot():
    """Restart the device-supervisor process so config changes take effect.

    Sends SIGHUP to the PID recorded in /var/run/python/supervisord.pid;
    the supervisor reloads with the freshly written configuration.
    """
    logger.info("!" * 10 + "REBOOTING DEVICE" + "!" * 10)
    result = os.popen("kill -s SIGHUP `cat /var/run/python/supervisord.pid`").read()
    logger.info(f"REBOOT : {result}")
|
||||||
|
|
||||||
|
def checkFileExist(filename):
    """Ensure /var/user/files/<filename> exists and holds valid JSON.

    Creates the directory and/or seeds the file with an empty JSON object
    when missing; existing files are left untouched.

    Fixes vs. original: the file is written with mode "w" instead of "a"
    (append mode could stack multiple `{}` objects, yielding invalid JSON),
    and the log message no longer claims "creds" for arbitrary filenames.
    """
    path = "/var/user/files"
    filepath = path + "/" + filename
    if not os.path.exists(path):
        logger.info("no folder making files folder in var/user")
        os.makedirs(path)
    if not os.path.exists(filepath):
        logger.info(f"no {filename} file, creating it")
        with open(filepath, "w") as f:
            json.dump({}, f)
|
||||||
|
|
||||||
|
def convertDStoJSON(ds):
    """Flatten a [{"key": k, "value": v}, ...] list into a plain {k: v} dict."""
    return {entry["key"]: entry["value"] for entry in ds}
|
||||||
|
|
||||||
|
def convertJSONtoDS(j):
    """Expand a plain {k: v} dict into a [{"key": k, "value": v}, ...] list."""
    return [{"key": key, "value": val} for key, val in j.items()]
|
||||||
|
|
||||||
|
def checkCredentialConfig():
    """Synchronise MQTT credentials between the supervisor config and creds.json.

    If the active config still holds placeholder credentials ("unknown" or an
    empty password), restore them from the stored creds.json, persist the
    updated config, and reboot so they take effect. Otherwise the config is
    treated as authoritative: parameters are reconciled, the config is
    rewritten, and its credentials are mirrored into creds.json.

    Fixes vs. original: the compare-then-assign credential sync (which wrote
    the same values either way) is replaced by direct assignment, and the
    creds.json read handle is no longer held open across the config rewrite.
    """
    logger.info("CHECKING CONFIG")
    cfgpath = "/var/user/cfg/device_supervisor/device_supervisor.cfg"
    credspath = "/var/user/files/creds.json"
    with open(cfgpath, "r") as f:
        cfg = json.load(f)
    clouds = cfg.get("clouds")
    logger.info(clouds)
    args = clouds[0]["args"]
    # "unconfigured" means any credential is still the factory placeholder.
    unconfigured = (
        args["clientId"] == "unknown"
        or args["username"] == "unknown"
        or not args["passwd"]
        or args["passwd"] == "unknown"
    )
    checkFileExist("creds.json")
    if unconfigured:
        # Try to restore the placeholders from previously stored credentials.
        with open(credspath, "r") as c:
            creds = json.load(c)
        if creds:
            logger.info("updating config with stored data")
            args["clientId"] = creds["clientId"]
            args["username"] = creds["userName"]
            args["passwd"] = creds["password"]
            cfg["clouds"] = clouds
            cfg = checkParameterConfig(cfg)
            with open(cfgpath, "w", encoding='utf-8') as n:
                json.dump(cfg, n, indent=1, ensure_ascii=False)
            # Reboot so the supervisor picks up the restored credentials.
            reboot()
    else:
        # Config is filled out: treat it as the source of truth and mirror
        # its credentials into the stored file for future restores.
        logger.info("updating stored file with new data")
        cfg = checkParameterConfig(cfg)
        with open(cfgpath, "w", encoding='utf-8') as n:
            json.dump(cfg, n, indent=1, ensure_ascii=False)
        with open(credspath, "r") as c:
            creds = json.load(c)
        creds["clientId"] = args["clientId"]
        creds["userName"] = args["username"]
        creds["password"] = args["passwd"]
        with open(credspath, "w") as cw:
            json.dump(creds, cw)
|
||||||
|
|
||||||
|
def checkParameterConfig(cfg):
    """Reconcile the config's "labels" parameters with the params.json store.

    Args:
        cfg: parsed device_supervisor config dict (must contain "labels").

    Returns:
        The cfg dict with "labels" replaced by the merged parameter set.

    Fix vs. original: the bare ``except:`` around the per-key merge is
    narrowed to ``except KeyError`` — the only expected failure is a key
    present in the config but missing from the store.
    """
    logger.info("Checking Parameters!!!!")
    paramspath = "/var/user/files/params.json"
    cfgparams = convertDStoJSON(cfg.get("labels"))
    # Make sure the stored-parameter file exists before reading it.
    checkFileExist("params.json")
    with open(paramspath, "r") as f:
        logger.info("Opened param storage file")
        params = json.load(f)
    if params:
        if cfgparams != params:
            # Config values win unless they are the "unknown" placeholder;
            # keys present only in the config are copied into the store.
            logger.info("equalizing params between cfg and stored")
            for key in cfgparams.keys():
                try:
                    if cfgparams[key] != params[key] and cfgparams[key] != "unknown":
                        params[key] = cfgparams[key]
                except KeyError:
                    # Key missing from the store entirely: take it from cfg.
                    params[key] = cfgparams[key]
            cfg["labels"] = convertJSONtoDS(params)
            _set_global_args(convertJSONtoDS(params))
            with open(paramspath, "w") as p:
                json.dump(params, p)
    else:
        # Empty store: seed it from the parameters currently in memory.
        with open(paramspath, "w") as p:
            logger.info("initializing param file with params in memory")
            json.dump(convertDStoJSON(get_params()), p)
        cfg["labels"] = get_params()
    return cfg
|
||||||
191
Pub_Sub/cameratrailer/thingsboard/v1/cameratrailer_tb_v1.cfg
Normal file
191
Pub_Sub/cameratrailer/thingsboard/v1/cameratrailer_tb_v1.cfg
Normal file
@@ -0,0 +1,191 @@
|
|||||||
|
{
|
||||||
|
"controllers": [
|
||||||
|
{
|
||||||
|
"protocol": "virtualcontroller",
|
||||||
|
"name": "cameratrailer",
|
||||||
|
"args": {},
|
||||||
|
"samplePeriod": 10,
|
||||||
|
"expired": 10000
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"groups": [
|
||||||
|
{
|
||||||
|
"name": "default",
|
||||||
|
"uploadInterval": 600,
|
||||||
|
"reference": 45
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"measures": [
|
||||||
|
{
|
||||||
|
"name": "t",
|
||||||
|
"ctrlName": "cameratrailer",
|
||||||
|
"group": "default",
|
||||||
|
"uploadType": "periodic",
|
||||||
|
"dataType": "BIT",
|
||||||
|
"addr": "",
|
||||||
|
"decimal": 2,
|
||||||
|
"len": 1,
|
||||||
|
"readWrite": "ro",
|
||||||
|
"unit": "",
|
||||||
|
"desc": "",
|
||||||
|
"transformType": 0,
|
||||||
|
"maxValue": "",
|
||||||
|
"minValue": "",
|
||||||
|
"maxScaleValue": "",
|
||||||
|
"minScaleValue": "",
|
||||||
|
"gain": "",
|
||||||
|
"offset": ""
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"alarms": [],
|
||||||
|
"misc": {
|
||||||
|
"maxAlarmRecordSz": 2000,
|
||||||
|
"logLvl": "DEBUG",
|
||||||
|
"coms": [
|
||||||
|
{
|
||||||
|
"name": "rs232",
|
||||||
|
"baud": 9600,
|
||||||
|
"bits": 8,
|
||||||
|
"stopbits": 1,
|
||||||
|
"parityChk": "n"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "rs485",
|
||||||
|
"baud": 19200,
|
||||||
|
"bits": 8,
|
||||||
|
"stopbits": 1,
|
||||||
|
"parityChk": "n"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"clouds": [
|
||||||
|
{
|
||||||
|
"cacheSize": 10000,
|
||||||
|
"enable": 1,
|
||||||
|
"type": "Standard MQTT",
|
||||||
|
"args": {
|
||||||
|
"host": "thingsboard.cloud",
|
||||||
|
"port": 1883,
|
||||||
|
"clientId": "camera-trailer-",
|
||||||
|
"auth": 1,
|
||||||
|
"tls": 0,
|
||||||
|
"cleanSession": 0,
|
||||||
|
"mqttVersion": "v3.1.1",
|
||||||
|
"keepalive": 60,
|
||||||
|
"key": "",
|
||||||
|
"cert": "",
|
||||||
|
"rootCA": "",
|
||||||
|
"verifyServer": 0,
|
||||||
|
"verifyClient": 0,
|
||||||
|
"username": "assmqtt",
|
||||||
|
"passwd": "assmqtt@1903"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"labels": [
|
||||||
|
{
|
||||||
|
"key": "SN",
|
||||||
|
"value": "GF5022137006251"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"key": "MAC",
|
||||||
|
"value": "00:18:05:1a:e5:36"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"key": "MAC_UPPER",
|
||||||
|
"value": "00:18:05:1A:E5:37"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"key": "MAC_LOWER",
|
||||||
|
"value": "00:18:05:1a:e5:37"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"quickfaas": {
|
||||||
|
"uploadFuncs": [
|
||||||
|
{
|
||||||
|
"qos": 1,
|
||||||
|
"funcName": "sendData",
|
||||||
|
"script": "# Enter your python code.\nimport json\nfrom common.Logger import logger\nfrom quickfaas.remotebus import publish\nfrom mobiuspi_lib.gps import GPS \n\ndef getGPS():\n # Create a gps instance\n gps = GPS()\n\n # Retrieve GPS information\n position_status = gps.get_position_status()\n logger.debug(\"position_status: \")\n logger.debug(position_status)\n latitude = position_status[\"latitude\"].split(\" \")\n longitude = position_status[\"longitude\"].split(\" \")\n lat_dec = int(latitude[0][:-1]) + (float(latitude[1][:-1])/60)\n lon_dec = int(longitude[0][:-1]) + (float(longitude[1][:-1])/60)\n if latitude[2] == \"S\":\n lat_dec = lat_dec * -1\n if longitude[2] == \"W\":\n lon_dec = lon_dec * -1\n #lat_dec = round(lat_dec, 7)\n #lon_dec = round(lon_dec, 7)\n logger.info(\"HERE IS THE GPS COORDS\")\n logger.info(f\"LATITUDE: {lat_dec}, LONGITUDE: {lon_dec}\")\n publish(__topic__, json.dumps({\"latitude\":f\"{lat_dec:.8f}\", \"longitude\":f\"{lon_dec:.8f}\"}), __qos__)\n\ndef sendData(message,wizard_api):\n logger.debug(message)\n #publish(__topic__, json.dumps(message), __qos__)\n getGPS()",
|
||||||
|
"name": "sendData",
|
||||||
|
"trigger": "measure_event",
|
||||||
|
"topic": "v1/devices/me/telemetry",
|
||||||
|
"msgType": 0,
|
||||||
|
"groups": [
|
||||||
|
"default"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"qos": 1,
|
||||||
|
"funcName": "sendSnapshot",
|
||||||
|
"script": "# Enter your python code.\nimport json\nfrom common.Logger import logger\nfrom quickfaas.remotebus import publish\nimport requests\nfrom requests.auth import HTTPDigestAuth\nfrom requests.exceptions import ConnectionError\nimport base64\n\ndef getImage():\n with open('./snapshot.jpg', 'wb') as handle:\n resp = requests.get(\"http://192.168.1.97:3097/cgi-bin/SnapshotJPEG?Resolution=640x360\", auth=HTTPDigestAuth(\"ASS\", \"amerus@1903\"), stream=True)\n for block in resp.iter_content(1024):\n if not block:\n break\n handle.write(block)\n \n with open('./snapshot.jpg', 'rb') as image_file:\n encoded_string = base64.b64encode(image_file.read())\n publish(__topic__, json.dumps({\"snapshot\": encoded_string.decode(\"UTF-8\"), \"camera_error\": \"OK\"}), __qos__)\n\n\ndef sendSnapshot(message,wizard_api):\n logger.debug(message)\n try:\n getImage()\n except ConnectionError as ce:\n logger.error(\"Could not connect to Camera\")\n logger.error(ce)\n publish(__topic__, json.dumps({\"camera_error\": f\"Could not connect to camera, check camera connection and power\\n\\n{ce}\"}), __qos__)\n except Exception as e:\n logger.error(\"Could not get image\")\n logger.error(e)\n publish(__topic__, json.dumps({\"camera_error\": f\"Could not connect to camera, check camera connection and power\\n\\n{e}\"}), __qos__)\n ",
|
||||||
|
"name": "snapshot",
|
||||||
|
"trigger": "measure_event",
|
||||||
|
"topic": "v1/devices/me/telemetry",
|
||||||
|
"cloudName": "default",
|
||||||
|
"groups": [
|
||||||
|
"snapshot"
|
||||||
|
],
|
||||||
|
"msgType": 0
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"downloadFuncs": []
|
||||||
|
},
|
||||||
|
"modbusSlave": {
|
||||||
|
"enable": 0,
|
||||||
|
"protocol": "Modbus-TCP",
|
||||||
|
"port": 502,
|
||||||
|
"slaveAddr": 1,
|
||||||
|
"int16Ord": "ab",
|
||||||
|
"int32Ord": "abcd",
|
||||||
|
"float32Ord": "abcd",
|
||||||
|
"maxConnection": 5,
|
||||||
|
"mapping_table": []
|
||||||
|
},
|
||||||
|
"iec104Server": {
|
||||||
|
"enable": 0,
|
||||||
|
"cotSize": 2,
|
||||||
|
"port": 2404,
|
||||||
|
"serverList": [
|
||||||
|
{
|
||||||
|
"asduAddr": 1
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"kValue": 12,
|
||||||
|
"wValue": 8,
|
||||||
|
"t0": 15,
|
||||||
|
"t1": 15,
|
||||||
|
"t2": 10,
|
||||||
|
"t3": 20,
|
||||||
|
"maximumLink": 5,
|
||||||
|
"timeSet": 1,
|
||||||
|
"byteOrder": "abcd",
|
||||||
|
"mapping_table": []
|
||||||
|
},
|
||||||
|
"opcuaServer": {
|
||||||
|
"enable": 0,
|
||||||
|
"port": 4840,
|
||||||
|
"maximumLink": 5,
|
||||||
|
"securityMode": 0,
|
||||||
|
"identifierType": "String",
|
||||||
|
"mapping_table": []
|
||||||
|
},
|
||||||
|
"bindConfig": {
|
||||||
|
"enable": 0,
|
||||||
|
"bind": {
|
||||||
|
"modelId": "",
|
||||||
|
"modelName": "",
|
||||||
|
"srcId": "",
|
||||||
|
"srcName": "",
|
||||||
|
"devId": "",
|
||||||
|
"devName": ""
|
||||||
|
},
|
||||||
|
"varGroups": [],
|
||||||
|
"variables": [],
|
||||||
|
"alerts": []
|
||||||
|
},
|
||||||
|
"southMetadata": {},
|
||||||
|
"bindMetadata": {
|
||||||
|
"version": "",
|
||||||
|
"timestamp": ""
|
||||||
|
}
|
||||||
|
}
|
||||||
205
Pub_Sub/cameratrailer/thingsboard/v2/cameratrailer_tb_v2.cfg
Normal file
205
Pub_Sub/cameratrailer/thingsboard/v2/cameratrailer_tb_v2.cfg
Normal file
File diff suppressed because one or more lines are too long
153
Pub_Sub/cameratrailer/thingsboard/v2/pub/sendData.py
Normal file
153
Pub_Sub/cameratrailer/thingsboard/v2/pub/sendData.py
Normal file
@@ -0,0 +1,153 @@
|
|||||||
|
import json, os
|
||||||
|
from datetime import datetime as dt
|
||||||
|
from common.Logger import logger
|
||||||
|
from quickfaas.remotebus import publish
|
||||||
|
from mobiuspi_lib.gps import GPS
|
||||||
|
from quickfaas.global_dict import get as get_params
|
||||||
|
from quickfaas.global_dict import _set_global_args
|
||||||
|
|
||||||
|
|
||||||
|
def reboot():
    """Restart the Device Supervisor app by signalling supervisord with SIGHUP."""
    banner = "!" * 10
    logger.info(banner + "REBOOTING DEVICE" + banner)
    # Shell out so the backtick substitution resolves the supervisord PID file.
    output = os.popen("kill -s SIGHUP `cat /var/run/python/supervisord.pid`").read()
    logger.info(f"REBOOT : {output}")
|
||||||
|
|
||||||
|
def checkFileExist(filename):
    """Ensure /var/user/files/<filename> exists.

    Creates the /var/user/files folder when missing and seeds the file with an
    empty JSON object ({}) when it does not yet exist.
    """
    path = "/var/user/files"
    if not os.path.exists(path):
        logger.info("no folder making files folder in var/user")
        os.makedirs(path)
    # The original wrote the seed file twice (once in the folder-creation branch,
    # once here); a single existence check gives the same end state.
    filepath = path + "/" + filename
    if not os.path.exists(filepath):
        logger.info("no creds file making creds file")
        with open(filepath, "a") as f:
            json.dump({}, f)
|
||||||
|
|
||||||
|
def convertDStoJSON(ds):
    """Flatten a Device Supervisor list of {"key": k, "value": v} records into a plain dict."""
    return {entry["key"]: entry["value"] for entry in ds}
|
||||||
|
|
||||||
|
def convertJSONtoDS(j):
    """Expand a plain dict into Device Supervisor's list of {"key": k, "value": v} records."""
    return [{"key": k, "value": v} for k, v in j.items()]
|
||||||
|
|
||||||
|
def checkCredentialConfig():
    """Keep MQTT credentials in sync between the supervisor config and creds.json.

    If the config still holds placeholder credentials ("unknown" or empty
    password), restore them from /var/user/files/creds.json, persist the
    rewritten config, and reboot so it takes effect. Otherwise, copy the
    (possibly manually edited) config credentials back into creds.json.

    NOTE(review): assumes creds.json uses keys clientId/userName/password and
    that cfg["clouds"][0] is the active cloud -- confirm against the config
    schema.
    """
    logger.info("CHECKING CONFIG")
    cfgpath = "/var/user/cfg/device_supervisor/device_supervisor.cfg"
    credspath = "/var/user/files/creds.json"
    cfg = dict()
    with open(cfgpath, "r") as f:
        cfg = json.load(f)
    clouds = cfg.get("clouds")
    logger.info(clouds)
    #if not configured then try to configure from stored values
    if clouds[0]["args"]["clientId"] == "unknown" or clouds[0]["args"]["username"] == "unknown" or not clouds[0]["args"]["passwd"] or clouds[0]["args"]["passwd"] == "unknown":
        checkFileExist("creds.json")
        with open(credspath, "r") as c:
            creds = json.load(c)
        # Only rewrite the config when the stored file actually has data;
        # an empty creds.json means there is nothing to restore from.
        if creds:
            logger.info("updating config with stored data")
            clouds[0]["args"]["clientId"] = creds["clientId"]
            clouds[0]["args"]["username"] = creds["userName"]
            clouds[0]["args"]["passwd"] = creds["password"]
            cfg["clouds"] = clouds
            cfg = checkParameterConfig(cfg)
            with open(cfgpath, "w", encoding='utf-8') as n:
                json.dump(cfg, n, indent=1, ensure_ascii=False)
            # Restart so the supervisor picks up the rewritten config.
            reboot()
    else:
        #assuming clouds is filled out, if data is different then assume someone typed in something new and store it, if creds is empty fill with clouds' data
        checkFileExist("creds.json")
        with open(credspath, "r") as c:
            logger.info("updating stored file with new data")
            # Sync labels/params and persist the config before reading creds.
            cfg = checkParameterConfig(cfg)
            with open(cfgpath, "w", encoding='utf-8') as n:
                json.dump(cfg, n, indent=1, ensure_ascii=False)
            creds = json.load(c)
        if creds:
            # Mirror any manually changed credential into the stored file.
            if creds["clientId"] != clouds[0]["args"]["clientId"]:
                creds["clientId"] = clouds[0]["args"]["clientId"]
            if creds["userName"] != clouds[0]["args"]["username"]:
                creds["userName"] = clouds[0]["args"]["username"]
            if creds["password"] != clouds[0]["args"]["passwd"]:
                creds["password"] = clouds[0]["args"]["passwd"]
        else:
            # creds.json was empty ({}): seed it from the current config.
            creds["clientId"] = clouds[0]["args"]["clientId"]
            creds["userName"] = clouds[0]["args"]["username"]
            creds["password"] = clouds[0]["args"]["passwd"]
        with open(credspath, "w") as cw:
            json.dump(creds,cw)
|
||||||
|
|
||||||
|
def checkParameterConfig(cfg):
    """Sync the "labels" parameter list in *cfg* with /var/user/files/params.json.

    When the stored file has data, any label that differs from the stored copy
    and is not the "unknown" placeholder wins (it was likely edited manually)
    and the merged set is written back to both cfg and the file. When the file
    is empty it is initialized from the in-memory parameters.

    Returns the (possibly updated) cfg dict.
    """
    logger.info("Checking Parameters!!!!")
    paramspath = "/var/user/files/params.json"
    cfgparams = convertDStoJSON(cfg.get("labels"))
    #check stored values
    checkFileExist("params.json")
    with open(paramspath, "r") as f:
        logger.info("Opened param storage file")
        params = json.load(f)
    if params:
        if cfgparams != params:
            #go through each param
            #if not "unknown" and cfg and params aren't the same take from cfg likely updated manually
            #if key in cfg but not in params copy to params
            logger.info("equalizing params between cfg and stored")
            for key in cfgparams.keys():
                try:
                    if cfgparams[key] != params[key] and cfgparams[key] != "unknown":
                        params[key] = cfgparams[key]
                except KeyError:
                    # Key exists in cfg but not yet in the stored file; was a
                    # bare except that would also have hidden real errors.
                    params[key] = cfgparams[key]
            cfg["labels"] = convertJSONtoDS(params)
            _set_global_args(convertJSONtoDS(params))
            with open(paramspath, "w") as p:
                json.dump(params, p)
    else:
        with open(paramspath, "w") as p:
            logger.info("initializing param file with params in memory")
            json.dump(convertDStoJSON(get_params()), p)
        cfg["labels"] = get_params()

    return cfg
|
||||||
|
|
||||||
|
def getGPS():
    """Read the modem GPS fix, convert it to signed decimal degrees, and publish
    latitude/longitude/speed (mph) with a timestamp rounded to 10 minutes."""
    # Create a gps instance
    gps = GPS()

    # Retrieve GPS information
    position_status = gps.get_position_status()
    logger.debug("position_status: ")
    logger.debug(position_status)
    # Each field looks like "<deg>X <minutes>X <hemisphere>"; [:-1] strips the
    # trailing unit letter -- assumed format, TODO confirm against mobiuspi_lib.
    lat_parts = position_status["latitude"].split(" ")
    lon_parts = position_status["longitude"].split(" ")
    lat_dec = int(lat_parts[0][:-1]) + float(lat_parts[1][:-1]) / 60
    lon_dec = int(lon_parts[0][:-1]) + float(lon_parts[1][:-1]) / 60
    # Southern / western hemispheres are negative in decimal degrees.
    if lat_parts[2] == "S":
        lat_dec = -lat_dec
    if lon_parts[2] == "W":
        lon_dec = -lon_dec
    logger.info("HERE IS THE GPS COORDS")
    logger.info(f"LATITUDE: {lat_dec}, LONGITUDE: {lon_dec}")
    knots = position_status["speed"].split(" ")
    speed_mph = float(knots[0]) * 1.151
    ts_ms = (round(dt.timestamp(dt.now()) / 600) * 600) * 1000
    publish(__topic__, json.dumps({"ts": ts_ms, "values":{"latitude":f"{lat_dec:.8f}", "longitude":f"{lon_dec:.8f}", "speed": f"{speed_mph:.2f}"}}), __qos__)
|
||||||
|
|
||||||
|
def sendData(message,wizard_api):
    """measure_event entry point: refresh credential/parameter config, then
    publish a GPS reading. Never raises -- failures are logged."""
    logger.debug(message)
    #publish(__topic__, json.dumps(message), __qos__)
    try:
        checkCredentialConfig()
        getGPS()
    except Exception as e:
        # Was a bare except that discarded the failure reason entirely.
        logger.error("Could not get gps data!")
        logger.error(e)
|
||||||
|
|
||||||
|
|
||||||
46
Pub_Sub/cameratrailer/thingsboard/v2/pub/sendSnapshot.py
Normal file
46
Pub_Sub/cameratrailer/thingsboard/v2/pub/sendSnapshot.py
Normal file
@@ -0,0 +1,46 @@
|
|||||||
|
import json
from common.Logger import logger
from quickfaas.remotebus import publish
from quickfaas.global_dict import get as get_params
from datetime import datetime as dt
import requests
from requests.auth import HTTPDigestAuth
from requests.exceptions import ConnectionError
import base64


def convertJSONtoDS(j):
    """Expand a plain dict into Device Supervisor's list of {"key","value"} records."""
    d = []
    for key in j.keys():
        d.append({"key": key, "value": j[key]})
    return d


def convertDStoJSON(ds):
    """Flatten a Device Supervisor list of {"key","value"} records into a plain dict.

    Named to match the sibling sendData.py scripts that define the same helper.
    """
    j = dict()
    for x in ds:
        j[x["key"]] = x["value"]
    return j


def getImage():
    """Fetch a JPEG snapshot from the camera and publish it base64-encoded."""
    # BUG FIX: get_params() returns the list-of-records form, so it must be
    # flattened *into* a dict. The original called convertJSONtoDS on it,
    # producing a list that the ["camera_ip"] lookup below crashed on.
    params = convertDStoJSON(get_params())
    # BUG FIX: "_" in the stored parameter stands in for the dots of an IPv4
    # address; the original replaced with ":" and built an invalid host
    # (e.g. "192:168:1:97"). The v3/v4 copies of this script use ".".
    camera_ip = params["camera_ip"].replace("_", ".")
    port = params["port"]
    with open('./snapshot.jpg', 'wb') as handle:
        resp = requests.get("http://" + camera_ip + ":" + port + "/cgi-bin/SnapshotJPEG?Resolution=640x360", auth=HTTPDigestAuth("ASS", "amerus@1903"), stream=True)
        for block in resp.iter_content(1024):
            if not block:
                break
            handle.write(block)

    with open('./snapshot.jpg', 'rb') as image_file:
        encoded_string = base64.b64encode(image_file.read())
        publish(__topic__, json.dumps({"ts": (round(dt.timestamp(dt.now())/600)*600)*1000, "values":{"snapshot": encoded_string.decode("UTF-8"), "camera_error": "OK"}}), __qos__)


def sendSnapshot(message,wizard_api):
    """measure_event entry point: capture and publish a snapshot, reporting
    camera errors on the same telemetry topic instead of raising."""
    logger.debug(message)
    try:
        getImage()
    except ConnectionError as ce:
        logger.error("Could not connect to Camera")
        logger.error(ce)
        publish(__topic__, json.dumps({"ts": (round(dt.timestamp(dt.now())/600)*600)*1000, "values":{"camera_error": f"Could not connect to camera, check camera connection and power\n\n{ce}"}}), __qos__)
    except Exception as e:
        logger.error("Could not get image")
        logger.error(e)
        publish(__topic__, json.dumps({"ts": (round(dt.timestamp(dt.now())/600)*600)*1000, "values":{"camera_error": f"Could not connect to camera, check camera connection and power\n\n{e}"}}), __qos__)
|
||||||
|
|
||||||
205
Pub_Sub/cameratrailer/thingsboard/v3/cameratrailer_tb_v3.cfg
Normal file
205
Pub_Sub/cameratrailer/thingsboard/v3/cameratrailer_tb_v3.cfg
Normal file
File diff suppressed because one or more lines are too long
153
Pub_Sub/cameratrailer/thingsboard/v3/pub/sendData.py
Normal file
153
Pub_Sub/cameratrailer/thingsboard/v3/pub/sendData.py
Normal file
@@ -0,0 +1,153 @@
|
|||||||
|
import json, os
from datetime import datetime as dt
from common.Logger import logger
from quickfaas.remotebus import publish
from mobiuspi_lib.gps import GPS
from quickfaas.global_dict import get as get_params
from quickfaas.global_dict import _set_global_args


def reboot():
    """Restart the app by sending SIGHUP to supervisord so it re-reads config."""
    #basic = Basic()
    logger.info("!" * 10 + "REBOOTING DEVICE" + "!"*10)
    r = os.popen("kill -s SIGHUP `cat /var/run/python/supervisord.pid`").read()
    logger.info(f"REBOOT : {r}")


def checkFileExist(filename):
    """Ensure /var/user/files/<filename> exists, seeding it with an empty JSON object."""
    path = "/var/user/files"
    if not os.path.exists(path):
        logger.info("no folder making files folder in var/user")
        os.makedirs(path)
        with open(path + "/" + filename, "a") as f:
            json.dump({}, f)
    if not os.path.exists(path + "/" + filename):
        logger.info("no creds file making creds file")
        with open(path + "/" + filename, "a") as f:
            json.dump({}, f)


def convertDStoJSON(ds):
    """Flatten a Device Supervisor list of {"key","value"} records into a plain dict."""
    j = dict()
    for x in ds:
        j[x["key"]] = x["value"]
    return j


def convertJSONtoDS(j):
    """Expand a plain dict into Device Supervisor's list of {"key","value"} records."""
    d = []
    for key in j.keys():
        d.append({"key": key, "value": j[key]})
    return d


def checkCredentialConfig():
    """Keep MQTT credentials in sync between the supervisor config and creds.json.

    If the config holds placeholder credentials ("unknown"/empty password),
    restore them from creds.json and reboot; otherwise persist the (possibly
    manually edited) config credentials back into creds.json.
    """
    logger.info("CHECKING CONFIG")
    cfgpath = "/var/user/cfg/device_supervisor/device_supervisor.cfg"
    credspath = "/var/user/files/creds.json"
    cfg = dict()
    with open(cfgpath, "r") as f:
        cfg = json.load(f)
    clouds = cfg.get("clouds")
    logger.info(clouds)
    #if not configured then try to configure from stored values
    if clouds[0]["args"]["clientId"] == "unknown" or clouds[0]["args"]["username"] == "unknown" or not clouds[0]["args"]["passwd"] or clouds[0]["args"]["passwd"] == "unknown":
        checkFileExist("creds.json")
        with open(credspath, "r") as c:
            creds = json.load(c)
        if creds:
            logger.info("updating config with stored data")
            clouds[0]["args"]["clientId"] = creds["clientId"]
            clouds[0]["args"]["username"] = creds["userName"]
            clouds[0]["args"]["passwd"] = creds["password"]
            cfg["clouds"] = clouds
            cfg = checkParameterConfig(cfg)
            with open(cfgpath, "w", encoding='utf-8') as n:
                json.dump(cfg, n, indent=1, ensure_ascii=False)
            # Restart so the supervisor picks up the rewritten config.
            reboot()
    else:
        #assuming clouds is filled out, if data is different then assume someone typed in something new and store it, if creds is empty fill with clouds' data
        checkFileExist("creds.json")
        with open(credspath, "r") as c:
            logger.info("updating stored file with new data")
            cfg = checkParameterConfig(cfg)
            with open(cfgpath, "w", encoding='utf-8') as n:
                json.dump(cfg, n, indent=1, ensure_ascii=False)
            creds = json.load(c)
        if creds:
            if creds["clientId"] != clouds[0]["args"]["clientId"]:
                creds["clientId"] = clouds[0]["args"]["clientId"]
            if creds["userName"] != clouds[0]["args"]["username"]:
                creds["userName"] = clouds[0]["args"]["username"]
            if creds["password"] != clouds[0]["args"]["passwd"]:
                creds["password"] = clouds[0]["args"]["passwd"]
        else:
            creds["clientId"] = clouds[0]["args"]["clientId"]
            creds["userName"] = clouds[0]["args"]["username"]
            creds["password"] = clouds[0]["args"]["passwd"]
        with open(credspath, "w") as cw:
            json.dump(creds,cw)


def checkParameterConfig(cfg):
    """Sync cfg["labels"] with /var/user/files/params.json; returns cfg."""
    logger.info("Checking Parameters!!!!")
    paramspath = "/var/user/files/params.json"
    cfgparams = convertDStoJSON(cfg.get("labels"))
    #check stored values
    checkFileExist("params.json")
    with open(paramspath, "r") as f:
        logger.info("Opened param storage file")
        params = json.load(f)
    if params:
        if cfgparams != params:
            #go through each param
            #if not "unknown" and cfg and params aren't the same take from cfg likely updated manually
            #if key in cfg but not in params copy to params
            logger.info("equalizing params between cfg and stored")
            for key in cfgparams.keys():
                try:
                    if cfgparams[key] != params[key] and cfgparams[key] != "unknown":
                        params[key] = cfgparams[key]
                except KeyError:
                    # Key missing from the stored file; was a bare except.
                    params[key] = cfgparams[key]
            cfg["labels"] = convertJSONtoDS(params)
            _set_global_args(convertJSONtoDS(params))
            with open(paramspath, "w") as p:
                json.dump(params, p)
    else:
        with open(paramspath, "w") as p:
            logger.info("initializing param file with params in memory")
            json.dump(convertDStoJSON(get_params()), p)
        cfg["labels"] = get_params()

    return cfg


def getGPS():
    """Read the GPS fix, convert to signed decimal degrees, publish lat/lon/speed."""
    # Create a gps instance
    gps = GPS()

    # Retrieve GPS information
    position_status = gps.get_position_status()
    logger.debug("position_status: ")
    logger.debug(position_status)
    latitude = position_status["latitude"].split(" ")
    longitude = position_status["longitude"].split(" ")
    lat_dec = int(latitude[0][:-1]) + (float(latitude[1][:-1])/60)
    lon_dec = int(longitude[0][:-1]) + (float(longitude[1][:-1])/60)
    if latitude[2] == "S":
        lat_dec = lat_dec * -1
    if longitude[2] == "W":
        lon_dec = lon_dec * -1
    logger.info("HERE IS THE GPS COORDS")
    logger.info(f"LATITUDE: {lat_dec}, LONGITUDE: {lon_dec}")
    speedKnots = position_status["speed"].split(" ")
    speedMPH = float(speedKnots[0]) * 1.151
    # Timestamp rounded to the nearest 10 minutes, in milliseconds.
    publish(__topic__, json.dumps({"ts": (round(dt.timestamp(dt.now())/600)*600)*1000, "values":{"latitude":f"{lat_dec:.8f}", "longitude":f"{lon_dec:.8f}", "speed": f"{speedMPH:.2f}"}}), __qos__)


def sendData(message,wizard_api):
    """measure_event entry point: refresh config, then publish a GPS reading."""
    logger.debug(message)
    #publish(__topic__, json.dumps(message), __qos__)
    try:
        checkCredentialConfig()
        getGPS()
    except Exception as e:
        # Was a bare except that discarded the failure reason entirely.
        logger.error("Could not get gps data!")
        logger.error(e)
|
||||||
|
|
||||||
|
|
||||||
54
Pub_Sub/cameratrailer/thingsboard/v3/pub/sendSnapshot.py
Normal file
54
Pub_Sub/cameratrailer/thingsboard/v3/pub/sendSnapshot.py
Normal file
@@ -0,0 +1,54 @@
|
|||||||
|
import json, time, requests, base64
from common.Logger import logger
from quickfaas.remotebus import publish
from quickfaas.global_dict import get as get_params
from datetime import datetime as dt
from requests.adapters import HTTPAdapter, Retry
from requests.auth import HTTPDigestAuth
from requests.exceptions import ConnectionError


def convertDStoJSON(ds):
    """Flatten a Device Supervisor list of {"key","value"} records into a plain dict."""
    j = dict()
    for x in ds:
        j[x["key"]] = x["value"]
    return j


def getImage():
    """Fetch a JPEG snapshot from the camera (with HTTP retries) and publish it base64-encoded."""
    params = convertDStoJSON(get_params())
    # "_" in the stored parameter stands in for the dots of an IPv4 address.
    camera_ip = params["camera_ip"].replace("_", ".")
    port = params["port"]
    with open('./snapshot.jpg', 'wb') as handle:
        with requests.Session() as s:
            retries = Retry(total = 10, backoff_factor=0.1, status_forcelist=[404,408, 500, 502, 503, 504])
            s.mount('http://', HTTPAdapter(max_retries=retries))
            try:
                # Best-effort autofocus before the capture; failure must not
                # block the snapshot itself.
                resp = s.get("http://" + camera_ip + ":" + port + "/cgi-bin/camctrl?af=on", auth=HTTPDigestAuth("ASS", "amerus@1903"),stream=True)
            except Exception as e:
                # Was a bare except that hid why autofocus failed.
                logger.error("Did not Auto Focus")
                logger.error(e)
            time.sleep(2)
            resp = s.get("http://" + camera_ip + ":" + port + "/cgi-bin/SnapshotJPEG?Resolution=640x360", auth=HTTPDigestAuth("ASS", "amerus@1903"), stream=True)
            for block in resp.iter_content(1024):
                if not block:
                    break
                handle.write(block)

    with open('./snapshot.jpg', 'rb') as image_file:
        encoded_string = base64.b64encode(image_file.read())
        publish(__topic__, json.dumps({"ts": (round(dt.timestamp(dt.now())/600)*600)*1000, "values":{"snapshot": encoded_string.decode("UTF-8"), "camera_error": "OK"}}), __qos__)


def sendSnapshot(message,wizard_api):
    """measure_event entry point: capture and publish a snapshot, reporting
    camera errors on the same telemetry topic instead of raising."""
    logger.debug(message)
    try:
        getImage()
    except ConnectionError as ce:
        logger.error("Could not connect to Camera")
        logger.error(ce)
        publish(__topic__, json.dumps({"ts": (round(dt.timestamp(dt.now())/600)*600)*1000, "values":{"camera_error": f"Could not connect to camera (ConnectionError), check camera connection and power\n\n{ce}"}}), __qos__)
    except Exception as e:
        logger.error("Could not get image")
        logger.error(e)
        publish(__topic__, json.dumps({"ts": (round(dt.timestamp(dt.now())/600)*600)*1000, "values":{"camera_error": f"Could not connect to camera, check camera connection, power, IP Address\n\n{e}"}}), __qos__)
|
||||||
|
|
||||||
205
Pub_Sub/cameratrailer/thingsboard/v4/cameratrailer_tb_v4.cfg
Normal file
205
Pub_Sub/cameratrailer/thingsboard/v4/cameratrailer_tb_v4.cfg
Normal file
File diff suppressed because one or more lines are too long
153
Pub_Sub/cameratrailer/thingsboard/v4/pub/sendData.py
Normal file
153
Pub_Sub/cameratrailer/thingsboard/v4/pub/sendData.py
Normal file
@@ -0,0 +1,153 @@
|
|||||||
|
import json, os
from datetime import datetime as dt
from common.Logger import logger
from quickfaas.remotebus import publish
from mobiuspi_lib.gps import GPS
from quickfaas.global_dict import get as get_params
from quickfaas.global_dict import _set_global_args


def reboot():
    """Restart the app by sending SIGHUP to supervisord so it re-reads config."""
    #basic = Basic()
    logger.info("!" * 10 + "REBOOTING DEVICE" + "!"*10)
    r = os.popen("kill -s SIGHUP `cat /var/run/python/supervisord.pid`").read()
    logger.info(f"REBOOT : {r}")


def checkFileExist(filename):
    """Ensure /var/user/files/<filename> exists, seeding it with an empty JSON object."""
    path = "/var/user/files"
    if not os.path.exists(path):
        logger.info("no folder making files folder in var/user")
        os.makedirs(path)
        with open(path + "/" + filename, "a") as f:
            json.dump({}, f)
    if not os.path.exists(path + "/" + filename):
        logger.info("no creds file making creds file")
        with open(path + "/" + filename, "a") as f:
            json.dump({}, f)


def convertDStoJSON(ds):
    """Flatten a Device Supervisor list of {"key","value"} records into a plain dict."""
    j = dict()
    for x in ds:
        j[x["key"]] = x["value"]
    return j


def convertJSONtoDS(j):
    """Expand a plain dict into Device Supervisor's list of {"key","value"} records."""
    d = []
    for key in j.keys():
        d.append({"key": key, "value": j[key]})
    return d


def checkCredentialConfig():
    """Keep MQTT credentials in sync between the supervisor config and creds.json.

    If the config holds placeholder credentials ("unknown"/empty password),
    restore them from creds.json and reboot; otherwise persist the (possibly
    manually edited) config credentials back into creds.json.
    """
    logger.info("CHECKING CONFIG")
    cfgpath = "/var/user/cfg/device_supervisor/device_supervisor.cfg"
    credspath = "/var/user/files/creds.json"
    cfg = dict()
    with open(cfgpath, "r") as f:
        cfg = json.load(f)
    clouds = cfg.get("clouds")
    logger.info(clouds)
    #if not configured then try to configure from stored values
    if clouds[0]["args"]["clientId"] == "unknown" or clouds[0]["args"]["username"] == "unknown" or not clouds[0]["args"]["passwd"] or clouds[0]["args"]["passwd"] == "unknown":
        checkFileExist("creds.json")
        with open(credspath, "r") as c:
            creds = json.load(c)
        if creds:
            logger.info("updating config with stored data")
            clouds[0]["args"]["clientId"] = creds["clientId"]
            clouds[0]["args"]["username"] = creds["userName"]
            clouds[0]["args"]["passwd"] = creds["password"]
            cfg["clouds"] = clouds
            cfg = checkParameterConfig(cfg)
            with open(cfgpath, "w", encoding='utf-8') as n:
                json.dump(cfg, n, indent=1, ensure_ascii=False)
            # Restart so the supervisor picks up the rewritten config.
            reboot()
    else:
        #assuming clouds is filled out, if data is different then assume someone typed in something new and store it, if creds is empty fill with clouds' data
        checkFileExist("creds.json")
        with open(credspath, "r") as c:
            logger.info("updating stored file with new data")
            cfg = checkParameterConfig(cfg)
            with open(cfgpath, "w", encoding='utf-8') as n:
                json.dump(cfg, n, indent=1, ensure_ascii=False)
            creds = json.load(c)
        if creds:
            if creds["clientId"] != clouds[0]["args"]["clientId"]:
                creds["clientId"] = clouds[0]["args"]["clientId"]
            if creds["userName"] != clouds[0]["args"]["username"]:
                creds["userName"] = clouds[0]["args"]["username"]
            if creds["password"] != clouds[0]["args"]["passwd"]:
                creds["password"] = clouds[0]["args"]["passwd"]
        else:
            creds["clientId"] = clouds[0]["args"]["clientId"]
            creds["userName"] = clouds[0]["args"]["username"]
            creds["password"] = clouds[0]["args"]["passwd"]
        with open(credspath, "w") as cw:
            json.dump(creds,cw)


def checkParameterConfig(cfg):
    """Sync cfg["labels"] with /var/user/files/params.json; returns cfg."""
    logger.info("Checking Parameters!!!!")
    paramspath = "/var/user/files/params.json"
    cfgparams = convertDStoJSON(cfg.get("labels"))
    #check stored values
    checkFileExist("params.json")
    with open(paramspath, "r") as f:
        logger.info("Opened param storage file")
        params = json.load(f)
    if params:
        if cfgparams != params:
            #go through each param
            #if not "unknown" and cfg and params aren't the same take from cfg likely updated manually
            #if key in cfg but not in params copy to params
            logger.info("equalizing params between cfg and stored")
            for key in cfgparams.keys():
                try:
                    if cfgparams[key] != params[key] and cfgparams[key] != "unknown":
                        params[key] = cfgparams[key]
                except KeyError:
                    # Key missing from the stored file; was a bare except.
                    params[key] = cfgparams[key]
            cfg["labels"] = convertJSONtoDS(params)
            _set_global_args(convertJSONtoDS(params))
            with open(paramspath, "w") as p:
                json.dump(params, p)
    else:
        with open(paramspath, "w") as p:
            logger.info("initializing param file with params in memory")
            json.dump(convertDStoJSON(get_params()), p)
        cfg["labels"] = get_params()

    return cfg


def getGPS():
    """Read the GPS fix, convert to signed decimal degrees, publish lat/lon/speed."""
    # Create a gps instance
    gps = GPS()

    # Retrieve GPS information
    position_status = gps.get_position_status()
    logger.debug("position_status: ")
    logger.debug(position_status)
    latitude = position_status["latitude"].split(" ")
    longitude = position_status["longitude"].split(" ")
    lat_dec = int(latitude[0][:-1]) + (float(latitude[1][:-1])/60)
    lon_dec = int(longitude[0][:-1]) + (float(longitude[1][:-1])/60)
    if latitude[2] == "S":
        lat_dec = lat_dec * -1
    if longitude[2] == "W":
        lon_dec = lon_dec * -1
    logger.info("HERE IS THE GPS COORDS")
    logger.info(f"LATITUDE: {lat_dec}, LONGITUDE: {lon_dec}")
    speedKnots = position_status["speed"].split(" ")
    speedMPH = float(speedKnots[0]) * 1.151
    # Timestamp rounded to the nearest 10 minutes, in milliseconds.
    publish(__topic__, json.dumps({"ts": (round(dt.timestamp(dt.now())/600)*600)*1000, "values":{"latitude":f"{lat_dec:.8f}", "longitude":f"{lon_dec:.8f}", "speed": f"{speedMPH:.2f}"}}), __qos__)


def sendData(message,wizard_api):
    """measure_event entry point: refresh config, then publish a GPS reading."""
    logger.debug(message)
    #publish(__topic__, json.dumps(message), __qos__)
    try:
        checkCredentialConfig()
        getGPS()
    except Exception as e:
        # Was a bare except that discarded the failure reason entirely.
        logger.error("Could not get gps data!")
        logger.error(e)
|
||||||
|
|
||||||
|
|
||||||
54
Pub_Sub/cameratrailer/thingsboard/v4/pub/sendSnapshot.py
Normal file
54
Pub_Sub/cameratrailer/thingsboard/v4/pub/sendSnapshot.py
Normal file
@@ -0,0 +1,54 @@
|
|||||||
|
import json, time, requests, base64
|
||||||
|
from common.Logger import logger
|
||||||
|
from quickfaas.remotebus import publish
|
||||||
|
from quickfaas.global_dict import get as get_params
|
||||||
|
from datetime import datetime as dt
|
||||||
|
from requests.adapters import HTTPAdapter, Retry
|
||||||
|
from requests.auth import HTTPDigestAuth
|
||||||
|
from requests.exceptions import ConnectionError
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
def convertDStoJSON(ds):
|
||||||
|
j = dict()
|
||||||
|
for x in ds:
|
||||||
|
j[x["key"]] = x["value"]
|
||||||
|
return j
|
||||||
|
|
||||||
|
def getImage():
|
||||||
|
params = convertDStoJSON(get_params())
|
||||||
|
camera_ip = params["camera_ip"].replace("_", ".")
|
||||||
|
port = params["port"]
|
||||||
|
with open('./snapshot.jpg', 'wb') as handle:
|
||||||
|
with requests.Session() as s:
|
||||||
|
retries = Retry(total = 10, backoff_factor=0.1, status_forcelist=[404,408, 500, 502, 503, 504])
|
||||||
|
s.mount('http://', HTTPAdapter(max_retries=retries))
|
||||||
|
try:
|
||||||
|
resp = s.get("http://" + camera_ip + ":" + port + "/cgi-bin/camctrl?af=on", auth=HTTPDigestAuth("ASS", "amerus@1903"),stream=True)
|
||||||
|
except:
|
||||||
|
logger.error("Did not Auto Focus")
|
||||||
|
time.sleep(2)
|
||||||
|
resp = s.get("http://" + camera_ip + ":" + port + "/cgi-bin/SnapshotJPEG?Resolution=640x360", auth=HTTPDigestAuth("ASS", "amerus@1903"), stream=True)
|
||||||
|
for block in resp.iter_content(1024):
|
||||||
|
if not block:
|
||||||
|
break
|
||||||
|
handle.write(block)
|
||||||
|
|
||||||
|
with open('./snapshot.jpg', 'rb') as image_file:
|
||||||
|
encoded_string = base64.b64encode(image_file.read())
|
||||||
|
publish(__topic__, json.dumps({"ts": (round(dt.timestamp(dt.now())/600)*600)*1000, "values":{"snapshot": encoded_string.decode("UTF-8"), "camera_error": "OK"}}), __qos__)
|
||||||
|
|
||||||
|
|
||||||
|
def sendSnapshot(message,wizard_api):
|
||||||
|
logger.debug(message)
|
||||||
|
try:
|
||||||
|
getImage()
|
||||||
|
except ConnectionError as ce:
|
||||||
|
logger.error("Could not connect to Camera")
|
||||||
|
logger.error(ce)
|
||||||
|
publish(__topic__, json.dumps({"ts": (round(dt.timestamp(dt.now())/600)*600)*1000, "values":{"camera_error": f"Could not connect to camera (ConnectionError), check camera connection and power\n\n{ce}"}}), __qos__)
|
||||||
|
except Exception as e:
|
||||||
|
logger.error("Could not get image")
|
||||||
|
logger.error(e)
|
||||||
|
publish(__topic__, json.dumps({"ts": (round(dt.timestamp(dt.now())/600)*600)*1000, "values":{"camera_error": f"Could not connect to camera, check camera connection, power, IP Address\n\n{e}"}}), __qos__)
|
||||||
|
|
||||||
209
Pub_Sub/cameratrailer/v2/cameratrailer_tb_v2_bak.cfg
Normal file
209
Pub_Sub/cameratrailer/v2/cameratrailer_tb_v2_bak.cfg
Normal file
@@ -0,0 +1,209 @@
|
|||||||
|
{
|
||||||
|
"controllers": [
|
||||||
|
{
|
||||||
|
"protocol": "Virtual Controller",
|
||||||
|
"name": "cameratrailer",
|
||||||
|
"args": {},
|
||||||
|
"endpoint": "",
|
||||||
|
"samplePeriod": 0,
|
||||||
|
"expired": 0
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"protocol": "Virtual Controller",
|
||||||
|
"name": "snapshot",
|
||||||
|
"args": {},
|
||||||
|
"endpoint": "",
|
||||||
|
"samplePeriod": 0,
|
||||||
|
"expired": 0
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"measures": [
|
||||||
|
{
|
||||||
|
"name": "c",
|
||||||
|
"ctrlName": "cameratrailer",
|
||||||
|
"group": "default",
|
||||||
|
"uploadType": "periodic",
|
||||||
|
"dataType": "BIT",
|
||||||
|
"addr": "",
|
||||||
|
"readWrite": "ro",
|
||||||
|
"unit": "",
|
||||||
|
"desc": "",
|
||||||
|
"transformType": 0,
|
||||||
|
"bitMap": 0
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "s",
|
||||||
|
"ctrlName": "snapshot",
|
||||||
|
"group": "snapshot",
|
||||||
|
"uploadType": "periodic",
|
||||||
|
"dataType": "BIT",
|
||||||
|
"addr": "",
|
||||||
|
"readWrite": "ro",
|
||||||
|
"unit": "",
|
||||||
|
"desc": "",
|
||||||
|
"transformType": 0,
|
||||||
|
"bitMap": 0
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"alarmLables": [
|
||||||
|
"default"
|
||||||
|
],
|
||||||
|
"alarms": [],
|
||||||
|
"groups": [
|
||||||
|
{
|
||||||
|
"name": "default",
|
||||||
|
"uploadInterval": 600
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "snapshot",
|
||||||
|
"uploadInterval": 3600
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"misc": {
|
||||||
|
"maxAlarmRecordSz": 2000,
|
||||||
|
"logLvl": "INFO",
|
||||||
|
"coms": [
|
||||||
|
{
|
||||||
|
"name": "rs232",
|
||||||
|
"baud": 9600,
|
||||||
|
"bits": 8,
|
||||||
|
"stopbits": 1,
|
||||||
|
"parityChk": "n"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "rs485",
|
||||||
|
"baud": 9600,
|
||||||
|
"bits": 8,
|
||||||
|
"stopbits": 1,
|
||||||
|
"parityChk": "n"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"clouds": [
|
||||||
|
{
|
||||||
|
"cacheSize": 10000,
|
||||||
|
"enable": 1,
|
||||||
|
"name": "default",
|
||||||
|
"type": "Standard MQTT",
|
||||||
|
"args": {
|
||||||
|
"host": "thingsboard.cloud",
|
||||||
|
"port": 1883,
|
||||||
|
"clientId": "camera-trailer-110",
|
||||||
|
"auth": 1,
|
||||||
|
"tls": 0,
|
||||||
|
"cleanSession": 0,
|
||||||
|
"mqttVersion": "v3.1.1",
|
||||||
|
"keepalive": 60,
|
||||||
|
"key": "",
|
||||||
|
"cert": "",
|
||||||
|
"rootCA": "",
|
||||||
|
"verifyServer": 0,
|
||||||
|
"verifyClient": 0,
|
||||||
|
"username": "assmqtt",
|
||||||
|
"passwd": "assmqtt@1903",
|
||||||
|
"authType": 1
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"quickfaas": {
|
||||||
|
"genericFuncs": [],
|
||||||
|
"uploadFuncs": [
|
||||||
|
{
|
||||||
|
"qos": 1,
|
||||||
|
"funcName": "sendData",
|
||||||
|
"script": "# Enter your python code.\nimport json\nfrom common.Logger import logger\nfrom quickfaas.remotebus import publish\nfrom mobiuspi_lib.gps import GPS \n\ndef getGPS():\n # Create a gps instance\n gps = GPS()\n\n # Retrieve GPS information\n position_status = gps.get_position_status()\n logger.debug(\"position_status: \")\n logger.debug(position_status)\n latitude = position_status[\"latitude\"].split(\" \")\n longitude = position_status[\"longitude\"].split(\" \")\n lat_dec = int(latitude[0][:-1]) + (float(latitude[1][:-1])/60)\n lon_dec = int(longitude[0][:-1]) + (float(longitude[1][:-1])/60)\n if latitude[2] == \"S\":\n lat_dec = lat_dec * -1\n if longitude[2] == \"W\":\n lon_dec = lon_dec * -1\n #lat_dec = round(lat_dec, 7)\n #lon_dec = round(lon_dec, 7)\n logger.info(\"HERE IS THE GPS COORDS\")\n logger.info(f\"LATITUDE: {lat_dec}, LONGITUDE: {lon_dec}\")\n speedKnots = position_status[\"speed\"].split(\" \")\n speedMPH = float(speedKnots[0]) * 1.151\n publish(__topic__, json.dumps({\"latitude\":f\"{lat_dec:.8f}\", \"longitude\":f\"{lon_dec:.8f}\", \"speed\": f\"{speedMPH:.2f}\"}), __qos__)\n\ndef sendData(message,wizard_api):\n logger.debug(message)\n #publish(__topic__, json.dumps(message), __qos__)\n try:\n getGPS()\n except:\n logger.error(\"Could not get gps data!\")\n\n ",
|
||||||
|
"name": "sendData",
|
||||||
|
"trigger": "measure_event",
|
||||||
|
"topic": "v1/devices/me/telemetry",
|
||||||
|
"cloudName": "default",
|
||||||
|
"groups": [
|
||||||
|
"default"
|
||||||
|
],
|
||||||
|
"msgType": 0
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"qos": 1,
|
||||||
|
"funcName": "sendSnapshot",
|
||||||
|
"script": "# Enter your python code.\nimport json\nfrom common.Logger import logger\nfrom quickfaas.remotebus import publish\nimport requests\nfrom requests.auth import HTTPDigestAuth\nfrom requests.exceptions import ConnectionError\nimport base64\n\ndef getImage():\n with open('./snapshot.jpg', 'wb') as handle:\n resp = requests.get(\"http://192.168.1.97:3097/cgi-bin/SnapshotJPEG?Resolution=640x360\", auth=HTTPDigestAuth(\"ASS\", \"amerus@1903\"), stream=True)\n for block in resp.iter_content(1024):\n if not block:\n break\n handle.write(block)\n \n with open('./snapshot.jpg', 'rb') as image_file:\n encoded_string = base64.b64encode(image_file.read())\n publish(__topic__, json.dumps({\"snapshot\": encoded_string.decode(\"UTF-8\"), \"camera_error\": \"OK\"}), __qos__)\n\n\ndef sendSnapshot(message,wizard_api):\n logger.debug(message)\n try:\n getImage()\n except ConnectionError as ce:\n logger.error(\"Could not connect to Camera\")\n logger.error(ce)\n publish(__topic__, json.dumps({\"camera_error\": f\"Could not connect to camera, check camera connection and power\\n\\n{ce}\"}), __qos__)\n except Exception as e:\n logger.error(\"Could not get image\")\n logger.error(e)\n publish(__topic__, json.dumps({\"camera_error\": f\"Could not connect to camera, check camera connection and power\\n\\n{e}\"}), __qos__)\n ",
|
||||||
|
"name": "snapshot",
|
||||||
|
"trigger": "measure_event",
|
||||||
|
"topic": "v1/devices/me/telemetry",
|
||||||
|
"cloudName": "default",
|
||||||
|
"groups": [
|
||||||
|
"snapshot"
|
||||||
|
],
|
||||||
|
"msgType": 0
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"downloadFuncs": []
|
||||||
|
},
|
||||||
|
"labels": [
|
||||||
|
{
|
||||||
|
"key": "SN",
|
||||||
|
"value": "GF5022132004824"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"key": "MAC",
|
||||||
|
"value": "00:18:05:19:bf:b2"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"modbusSlave": {
|
||||||
|
"enable": 0,
|
||||||
|
"protocol": "Modbus-TCP",
|
||||||
|
"port": 502,
|
||||||
|
"slaveAddr": 1,
|
||||||
|
"int16Ord": "ab",
|
||||||
|
"int32Ord": "abcd",
|
||||||
|
"float32Ord": "abcd",
|
||||||
|
"maxConnection": 5,
|
||||||
|
"mapping_table": []
|
||||||
|
},
|
||||||
|
"iec104Server": {
|
||||||
|
"enable": 0,
|
||||||
|
"cotSize": 2,
|
||||||
|
"port": 2404,
|
||||||
|
"serverList": [
|
||||||
|
{
|
||||||
|
"asduAddr": 1
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"kValue": 12,
|
||||||
|
"wValue": 8,
|
||||||
|
"t0": 15,
|
||||||
|
"t1": 15,
|
||||||
|
"t2": 10,
|
||||||
|
"t3": 20,
|
||||||
|
"maximumLink": 5,
|
||||||
|
"timeSet": 1,
|
||||||
|
"byteOrder": "abcd",
|
||||||
|
"mapping_table": []
|
||||||
|
},
|
||||||
|
"opcuaServer": {
|
||||||
|
"enable": 0,
|
||||||
|
"port": 4840,
|
||||||
|
"maximumLink": 5,
|
||||||
|
"securityMode": 0,
|
||||||
|
"identifierType": "String",
|
||||||
|
"mapping_table": []
|
||||||
|
},
|
||||||
|
"southMetadata": {},
|
||||||
|
"bindMetadata": {
|
||||||
|
"version": "",
|
||||||
|
"timestamp": ""
|
||||||
|
},
|
||||||
|
"bindConfig": {
|
||||||
|
"enable": 0,
|
||||||
|
"bind": {
|
||||||
|
"modelId": "",
|
||||||
|
"modelName": "",
|
||||||
|
"srcId": "",
|
||||||
|
"srcName": "",
|
||||||
|
"devId": "",
|
||||||
|
"devName": ""
|
||||||
|
},
|
||||||
|
"varGroups": [],
|
||||||
|
"variables": [],
|
||||||
|
"alerts": []
|
||||||
|
},
|
||||||
|
"version": "2.2.1"
|
||||||
|
}
|
||||||
196
Pub_Sub/cameratrailer/v2/cameratrailer_tb_v2_test.cfg
Normal file
196
Pub_Sub/cameratrailer/v2/cameratrailer_tb_v2_test.cfg
Normal file
@@ -0,0 +1,196 @@
|
|||||||
|
{
|
||||||
|
"controllers": [
|
||||||
|
{
|
||||||
|
"protocol": "Virtual Controller",
|
||||||
|
"name": "cameratrailer",
|
||||||
|
"args": {},
|
||||||
|
"endpoint": "",
|
||||||
|
"samplePeriod": 0,
|
||||||
|
"expired": 0
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"protocol": "Virtual Controller",
|
||||||
|
"name": "snapshot",
|
||||||
|
"args": {},
|
||||||
|
"endpoint": "",
|
||||||
|
"samplePeriod": 0,
|
||||||
|
"expired": 0
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"measures": [
|
||||||
|
{
|
||||||
|
"name": "c",
|
||||||
|
"ctrlName": "cameratrailer",
|
||||||
|
"group": "default",
|
||||||
|
"uploadType": "periodic",
|
||||||
|
"dataType": "BIT",
|
||||||
|
"addr": "",
|
||||||
|
"readWrite": "ro",
|
||||||
|
"unit": "",
|
||||||
|
"desc": "",
|
||||||
|
"transformType": 0,
|
||||||
|
"bitMap": 0
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "s",
|
||||||
|
"ctrlName": "snapshot",
|
||||||
|
"group": "snapshot",
|
||||||
|
"uploadType": "periodic",
|
||||||
|
"dataType": "BIT",
|
||||||
|
"addr": "",
|
||||||
|
"readWrite": "ro",
|
||||||
|
"unit": "",
|
||||||
|
"desc": "",
|
||||||
|
"transformType": 0,
|
||||||
|
"bitMap": 0
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"alarms": [],
|
||||||
|
"groups": [
|
||||||
|
{
|
||||||
|
"name": "default",
|
||||||
|
"uploadInterval": 600
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "snapshot",
|
||||||
|
"uploadInterval": 3600
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"misc": {
|
||||||
|
"maxAlarmRecordSz": 2000,
|
||||||
|
"logLvl": "INFO",
|
||||||
|
"coms": [
|
||||||
|
{
|
||||||
|
"name": "rs232",
|
||||||
|
"baud": 9600,
|
||||||
|
"bits": 8,
|
||||||
|
"stopbits": 1,
|
||||||
|
"parityChk": "n"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "rs485",
|
||||||
|
"baud": 9600,
|
||||||
|
"bits": 8,
|
||||||
|
"stopbits": 1,
|
||||||
|
"parityChk": "n"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"clouds": [
|
||||||
|
{
|
||||||
|
"cacheSize": 10000,
|
||||||
|
"enable": 1,
|
||||||
|
"name": "default",
|
||||||
|
"type": "Standard MQTT",
|
||||||
|
"args": {
|
||||||
|
"host": "thingsboard.cloud",
|
||||||
|
"port": 1883,
|
||||||
|
"clientId": "camera-trailer",
|
||||||
|
"auth": 1,
|
||||||
|
"tls": 0,
|
||||||
|
"cleanSession": 0,
|
||||||
|
"mqttVersion": "v3.1.1",
|
||||||
|
"keepalive": 60,
|
||||||
|
"key": "",
|
||||||
|
"cert": "",
|
||||||
|
"rootCA": "",
|
||||||
|
"verifyServer": 0,
|
||||||
|
"verifyClient": 0,
|
||||||
|
"username": "assmqtt",
|
||||||
|
"passwd": "assmqtt@1903",
|
||||||
|
"authType": 1
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"quickfaas": {
|
||||||
|
"genericFuncs": [],
|
||||||
|
"uploadFuncs": [
|
||||||
|
{
|
||||||
|
"qos": 1,
|
||||||
|
"funcName": "sendData",
|
||||||
|
"script": "# Enter your python code.\nimport json\nfrom common.Logger import logger\nfrom quickfaas.remotebus import publish\nfrom mobiuspi_lib.gps import GPS \n\ndef getGPS():\n # Create a gps instance\n gps = GPS()\n\n # Retrieve GPS information\n position_status = gps.get_position_status()\n logger.debug(\"position_status: \")\n logger.debug(position_status)\n latitude = position_status[\"latitude\"].split(\" \")\n longitude = position_status[\"longitude\"].split(\" \")\n lat_dec = int(latitude[0][:-1]) + (float(latitude[1][:-1])/60)\n lon_dec = int(longitude[0][:-1]) + (float(longitude[1][:-1])/60)\n if latitude[2] == \"S\":\n lat_dec = lat_dec * -1\n if longitude[2] == \"W\":\n lon_dec = lon_dec * -1\n #lat_dec = round(lat_dec, 7)\n #lon_dec = round(lon_dec, 7)\n logger.info(\"HERE IS THE GPS COORDS\")\n logger.info(f\"LATITUDE: {lat_dec}, LONGITUDE: {lon_dec}\")\n speedKnots = position_status[\"speed\"].split(\" \")\n speedMPH = float(speedKnots[0]) * 1.151\n publish(__topic__, json.dumps({\"latitude\":f\"{lat_dec:.8f}\", \"longitude\":f\"{lon_dec:.8f}\", \"speed\": f\"{speedMPH:.2f}\"}), __qos__)\n\ndef sendData(message,wizard_api):\n logger.debug(message)\n #publish(__topic__, json.dumps(message), __qos__)\n try:\n getGPS()\n except:\n logger.error(\"Could not get gps data!\")\n\n ",
|
||||||
|
"name": "sendData",
|
||||||
|
"trigger": "measure_event",
|
||||||
|
"topic": "v1/devices/me/telemetry",
|
||||||
|
"cloudName": "default",
|
||||||
|
"groups": [
|
||||||
|
"default"
|
||||||
|
],
|
||||||
|
"msgType": 0
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"qos": 1,
|
||||||
|
"funcName": "sendSnapshot",
|
||||||
|
"script": "# Enter your python code.\nimport json\nfrom common.Logger import logger\nfrom quickfaas.remotebus import publish\nimport requests\nfrom requests.auth import HTTPDigestAuth\nfrom requests.exceptions import ConnectionError\nimport base64\n\ndef getImage():\n with open('./snapshot.jpg', 'wb') as handle:\n resp = requests.get(\"http://192.168.1.97:3097/cgi-bin/SnapshotJPEG?Resolution=640x360\", auth=HTTPDigestAuth(\"ASS\", \"amerus@1903\"), stream=True)\n for block in resp.iter_content(1024):\n if not block:\n break\n handle.write(block)\n \n with open('./snapshot.jpg', 'rb') as image_file:\n encoded_string = base64.b64encode(image_file.read())\n publish(__topic__, json.dumps({\"snapshot\": encoded_string.decode(\"UTF-8\"), \"camera_error\": \"OK\"}), __qos__)\n\n\ndef sendSnapshot(message,wizard_api):\n logger.debug(message)\n try:\n getImage()\n except ConnectionError as ce:\n logger.error(\"Could not connect to Camera\")\n logger.error(ce)\n publish(__topic__, json.dumps({\"camera_error\": f\"Could not connect to camera, check camera connection and power\\n\\n{ce}\"}), __qos__)\n except Exception as e:\n logger.error(\"Could not get image\")\n logger.error(e)\n publish(__topic__, json.dumps({\"camera_error\": f\"Could not connect to camera, check camera connection and power\\n\\n{e}\"}), __qos__)\n ",
|
||||||
|
"name": "snapshot",
|
||||||
|
"trigger": "measure_event",
|
||||||
|
"topic": "v1/devices/me/telemetry",
|
||||||
|
"cloudName": "default",
|
||||||
|
"groups": [
|
||||||
|
"snapshot"
|
||||||
|
],
|
||||||
|
"msgType": 0
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"downloadFuncs": []
|
||||||
|
},
|
||||||
|
"modbusSlave": {
|
||||||
|
"enable": 0,
|
||||||
|
"protocol": "Modbus-TCP",
|
||||||
|
"port": 502,
|
||||||
|
"slaveAddr": 1,
|
||||||
|
"int16Ord": "ab",
|
||||||
|
"int32Ord": "abcd",
|
||||||
|
"float32Ord": "abcd",
|
||||||
|
"maxConnection": 5,
|
||||||
|
"mapping_table": []
|
||||||
|
},
|
||||||
|
"iec104Server": {
|
||||||
|
"enable": 0,
|
||||||
|
"cotSize": 2,
|
||||||
|
"port": 2404,
|
||||||
|
"serverList": [
|
||||||
|
{
|
||||||
|
"asduAddr": 1
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"kValue": 12,
|
||||||
|
"wValue": 8,
|
||||||
|
"t0": 15,
|
||||||
|
"t1": 15,
|
||||||
|
"t2": 10,
|
||||||
|
"t3": 20,
|
||||||
|
"maximumLink": 5,
|
||||||
|
"timeSet": 1,
|
||||||
|
"byteOrder": "abcd",
|
||||||
|
"mapping_table": []
|
||||||
|
},
|
||||||
|
"opcuaServer": {
|
||||||
|
"enable": 0,
|
||||||
|
"port": 4840,
|
||||||
|
"maximumLink": 5,
|
||||||
|
"securityMode": 0,
|
||||||
|
"identifierType": "String",
|
||||||
|
"mapping_table": []
|
||||||
|
},
|
||||||
|
"southMetadata": {},
|
||||||
|
"bindMetadata": {
|
||||||
|
"version": "",
|
||||||
|
"timestamp": ""
|
||||||
|
},
|
||||||
|
"bindConfig": {
|
||||||
|
"enable": 0,
|
||||||
|
"bind": {
|
||||||
|
"modelId": "",
|
||||||
|
"modelName": "",
|
||||||
|
"srcId": "",
|
||||||
|
"srcName": "",
|
||||||
|
"devId": "",
|
||||||
|
"devName": ""
|
||||||
|
},
|
||||||
|
"varGroups": [],
|
||||||
|
"variables": [],
|
||||||
|
"alerts": []
|
||||||
|
},
|
||||||
|
"version": "2.2.1"
|
||||||
|
}
|
||||||
0
Pub_Sub/cameratrailer/v3/cameratrailer_tb_v3.cfg
Normal file
0
Pub_Sub/cameratrailer/v3/cameratrailer_tb_v3.cfg
Normal file
78
Pub_Sub/config_manager.ipynb
Normal file
78
Pub_Sub/config_manager.ipynb
Normal file
@@ -0,0 +1,78 @@
|
|||||||
|
{
|
||||||
|
"cells": [
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": 1,
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [],
|
||||||
|
"source": [
|
||||||
|
"import convert_config\n",
|
||||||
|
"import os"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": 2,
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [],
|
||||||
|
"source": [
|
||||||
|
"root = \"/Users/nico/Documents/GitHub/HP_InHand_IG502/Pub_Sub/\"\n",
|
||||||
|
"devicetype = \"advvfdipp\"\n",
|
||||||
|
"platform = \"thingsboard\" #\"mistaway\"\n",
|
||||||
|
"startversion = 2\n",
|
||||||
|
"deviceconfig = devicetype + \"_tb_\" +\"v\" + str(startversion) + \".cfg\"\n"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": null,
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [],
|
||||||
|
"source": [
|
||||||
|
"\n",
|
||||||
|
"convert_config.write_code(root + devicetype + \"/\" + platform + \"/v\" + str(startversion) + \"/pub\" + \"/sendData.py\", convert_config.get_config_pub(root + \"/\" + devicetype + \"/\" + platform + \"/v\" + str(startversion) + \"/\" + deviceconfig))\n",
|
||||||
|
"#convert_config.write_code(root + devicetype + \"/\" + platform + \"/v\" + str(startversion) + \"/sub\" + \"/receiveCommand.py\", convert_config.get_config_sub(root + \"/\" + devicetype + \"/\" + platform + \"/v\" + str(startversion) + \"/\" + deviceconfig))\n"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": 3,
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [],
|
||||||
|
"source": [
|
||||||
|
"\n",
|
||||||
|
"convert_config.write_config(root + \"/\" + devicetype + \"/thingsboard/v\" + str(startversion + 1) + \"/\" + devicetype + \"_tb_v\" + str(startversion + 1) + \".cfg\", \n",
|
||||||
|
" convert_config.get_config(root + \"/\" + devicetype + \"/thingsboard/v\" + str(startversion) + \"/\" + devicetype + \"_tb_v\" + str(startversion) + \".cfg\"),\n",
|
||||||
|
" root + \"/\" + devicetype + \"/thingsboard/v\" + str(startversion + 1) + \"/pub\" , \n",
|
||||||
|
" root + \"/\" + devicetype + \"/thingsboard/v\" + str(startversion + 1) + \"/sub\")"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"metadata": {
|
||||||
|
"kernelspec": {
|
||||||
|
"display_name": "Python 3.10.5 ('env-01')",
|
||||||
|
"language": "python",
|
||||||
|
"name": "python3"
|
||||||
|
},
|
||||||
|
"language_info": {
|
||||||
|
"codemirror_mode": {
|
||||||
|
"name": "ipython",
|
||||||
|
"version": 3
|
||||||
|
},
|
||||||
|
"file_extension": ".py",
|
||||||
|
"mimetype": "text/x-python",
|
||||||
|
"name": "python",
|
||||||
|
"nbconvert_exporter": "python",
|
||||||
|
"pygments_lexer": "ipython3",
|
||||||
|
"version": "3.10.5"
|
||||||
|
},
|
||||||
|
"orig_nbformat": 4,
|
||||||
|
"vscode": {
|
||||||
|
"interpreter": {
|
||||||
|
"hash": "661929b843193117f8407b47db3d9660d53447b05faf9ee8b39d0697e59e9e99"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"nbformat": 4,
|
||||||
|
"nbformat_minor": 2
|
||||||
|
}
|
||||||
@@ -1,40 +1,58 @@
|
|||||||
|
from base64 import encode
|
||||||
import os
|
import os
|
||||||
import json
|
import json
|
||||||
|
|
||||||
def get_config(path):
|
def get_config(path):
|
||||||
with open(path, "r") as f:
|
checkFileExist(path)
|
||||||
|
with open(path, "r", encoding="utf-8") as f:
|
||||||
|
|
||||||
return json.load(f)
|
return json.load(f)
|
||||||
|
|
||||||
def get_config_pub(path):
|
def get_config_pub(path):
|
||||||
|
checkFileExist(path)
|
||||||
with open(path, "r") as f:
|
with open(path, "r") as f:
|
||||||
codeString = json.load(f)
|
codeString = json.load(f)
|
||||||
return codeString["quickfaas"]["uploadFuncs"][0]["script"]
|
return codeString["quickfaas"]["uploadFuncs"][0]["script"]
|
||||||
|
|
||||||
def get_config_sub(path):
|
def get_config_sub(path):
|
||||||
|
checkFileExist(path)
|
||||||
with open(path, "r") as f:
|
with open(path, "r") as f:
|
||||||
codeString = json.load(f)
|
codeString = json.load(f)
|
||||||
return codeString["quickfaas"]["downloadFuncs"][0]["script"]
|
return codeString["quickfaas"]["downloadFuncs"][0]["script"]
|
||||||
|
|
||||||
def code_to_string(path):
|
def code_to_string(path):
|
||||||
with open(path, "r") as f:
|
checkFileExist(path)
|
||||||
return f.read()
|
try:
|
||||||
|
with open(path, "r") as f:
|
||||||
|
return f.read()
|
||||||
|
except:
|
||||||
|
return False
|
||||||
|
|
||||||
def write_code(path, codestr):
|
def write_code(path, codestr):
|
||||||
|
checkFileExist(path)
|
||||||
with open(path, "w") as f:
|
with open(path, "w") as f:
|
||||||
f.write(codestr)
|
f.write(codestr)
|
||||||
|
|
||||||
def write_config(path, config, pub, sub):
|
def write_config(path, config, pubDir, subDir):
|
||||||
|
checkFileExist(path)
|
||||||
with open(path, "w") as f:
|
with open(path, "w") as f:
|
||||||
config["quickfaas"]["uploadFuncs"][0]["script"] = pub
|
if pubDir:
|
||||||
config["quickfaas"]["downloadFuncs"][0]["script"] = sub
|
with os.scandir(pubDir) as it:
|
||||||
|
for ind, entry in enumerate(it):
|
||||||
|
config["quickfaas"]["uploadFuncs"][ind]["script"] = code_to_string(entry.path)
|
||||||
|
if subDir:
|
||||||
|
with os.scandir(subDir) as it:
|
||||||
|
for ind, entry in enumerate(it):
|
||||||
|
config["quickfaas"]["downloadFuncs"][ind]["script"] = code_to_string(entry.path)
|
||||||
|
config["clouds"][0]["args"]["host"] = "hp.henrypump.cloud"
|
||||||
|
config["clouds"][0]["args"]["clientId"] = "unknown"
|
||||||
|
config["clouds"][0]["args"]["username"] = "unknown"
|
||||||
|
config["clouds"][0]["args"]["passwd"] = "unknown"
|
||||||
json.dump(config, f, indent=4)
|
json.dump(config, f, indent=4)
|
||||||
|
|
||||||
root = os.getcwd()
|
def checkFileExist(path):
|
||||||
devicetype = "advvfdipp"
|
if not os.path.exists("/".join(path.split("/")[:-1])):
|
||||||
startversion = 1
|
os.makedirs("/".join(path.split("/")[:-1]))
|
||||||
deviceconfig = "device_supervisor_" + devicetype + "_tb_" +"v" + str(startversion) + ".cfg"
|
open(path, "a").close()
|
||||||
|
if not os.path.exists(path):
|
||||||
#write_code(root + "/" + devicetype + "/v" + str(startversion) + "/pub" + "/sendData.py", get_config_pub(root + "/" + devicetype + "/v" + str(startversion) + "/" + deviceconfig))
|
open(path, "a").close()
|
||||||
#write_code(root + "/" + devicetype + "/v" + str(startversion) + "/sub" + "/receiveCommand.py", get_config_sub(root + "/" + devicetype + "/v" + str(startversion) + "/" + deviceconfig))
|
|
||||||
|
|
||||||
write_config(root + "/" + devicetype + "/thingsboard/v" + str(startversion + 1) + "/device_supervisor_" + devicetype + "v" + str(startversion + 1) + ".cfg", get_config(root + "/" + devicetype + "/thingsboard/v" + str(startversion) + "/device_supervisor_" + devicetype + "_tb_v" + str(startversion) + ".cfg"), code_to_string(root + "/" + devicetype + "/thingsboard/v" + str(startversion + 1) + "/pub" + "/sendData.py"), code_to_string(root + "/" + devicetype + "/thingsboard/v" + str(startversion + 1) + "/sub" + "/receiveCommand.py"))
|
|
||||||
504
Pub_Sub/plcpond/thingsboard/v1/plcpond_tb_v1.cfg
Normal file
504
Pub_Sub/plcpond/thingsboard/v1/plcpond_tb_v1.cfg
Normal file
@@ -0,0 +1,504 @@
|
|||||||
|
{
|
||||||
|
"controllers": [
|
||||||
|
{
|
||||||
|
"protocol": "EtherNet/IP",
|
||||||
|
"name": "plcpond",
|
||||||
|
"args": {},
|
||||||
|
"samplePeriod": 10,
|
||||||
|
"expired": 10000,
|
||||||
|
"endpoint": "192.168.1.12:44818"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"measures": [
|
||||||
|
{
|
||||||
|
"name": "pond_1_level",
|
||||||
|
"ctrlName": "plcpond",
|
||||||
|
"group": "default",
|
||||||
|
"uploadType": "periodic",
|
||||||
|
"dataType": "FLOAT",
|
||||||
|
"addr": "Pond_1_Lev",
|
||||||
|
"decimal": 2,
|
||||||
|
"len": 1,
|
||||||
|
"readWrite": "ro",
|
||||||
|
"unit": "",
|
||||||
|
"desc": "",
|
||||||
|
"transformType": 0,
|
||||||
|
"maxValue": "",
|
||||||
|
"minValue": "",
|
||||||
|
"maxScaleValue": "",
|
||||||
|
"minScaleValue": "",
|
||||||
|
"gain": "",
|
||||||
|
"offset": ""
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "pond_1_total_bbls",
|
||||||
|
"ctrlName": "plcpond",
|
||||||
|
"group": "default",
|
||||||
|
"uploadType": "periodic",
|
||||||
|
"dataType": "FLOAT",
|
||||||
|
"addr": "Pond_1_Total_Barrels",
|
||||||
|
"decimal": 2,
|
||||||
|
"len": 1,
|
||||||
|
"readWrite": "ro",
|
||||||
|
"unit": "",
|
||||||
|
"desc": "",
|
||||||
|
"transformType": 0,
|
||||||
|
"maxValue": "",
|
||||||
|
"minValue": "",
|
||||||
|
"maxScaleValue": "",
|
||||||
|
"minScaleValue": "",
|
||||||
|
"gain": "",
|
||||||
|
"offset": ""
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "pond_1_hi_alm",
|
||||||
|
"ctrlName": "plcpond",
|
||||||
|
"group": "default",
|
||||||
|
"uploadType": "periodic",
|
||||||
|
"dataType": "BIT",
|
||||||
|
"addr": "Pond_1_Hi_Alarm",
|
||||||
|
"decimal": 2,
|
||||||
|
"len": 1,
|
||||||
|
"readWrite": "ro",
|
||||||
|
"unit": "",
|
||||||
|
"desc": "",
|
||||||
|
"transformType": 0,
|
||||||
|
"maxValue": "",
|
||||||
|
"minValue": "",
|
||||||
|
"maxScaleValue": "",
|
||||||
|
"minScaleValue": "",
|
||||||
|
"gain": "",
|
||||||
|
"offset": ""
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "pond_1_hi_spt",
|
||||||
|
"ctrlName": "plcpond",
|
||||||
|
"group": "default",
|
||||||
|
"uploadType": "periodic",
|
||||||
|
"dataType": "FLOAT",
|
||||||
|
"addr": "Pond_1_Hi_Setpoint",
|
||||||
|
"decimal": 2,
|
||||||
|
"len": 1,
|
||||||
|
"readWrite": "rw",
|
||||||
|
"unit": "",
|
||||||
|
"desc": "",
|
||||||
|
"transformType": 0,
|
||||||
|
"maxValue": "",
|
||||||
|
"minValue": "",
|
||||||
|
"maxScaleValue": "",
|
||||||
|
"minScaleValue": "",
|
||||||
|
"gain": "",
|
||||||
|
"offset": ""
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "pond_1_hi_clr_spt",
|
||||||
|
"ctrlName": "plcpond",
|
||||||
|
"group": "default",
|
||||||
|
"uploadType": "periodic",
|
||||||
|
"dataType": "FLOAT",
|
||||||
|
"addr": "Pond_1_Hi_Clr_Setpoint",
|
||||||
|
"decimal": 2,
|
||||||
|
"len": 1,
|
||||||
|
"readWrite": "rw",
|
||||||
|
"unit": "",
|
||||||
|
"desc": "",
|
||||||
|
"transformType": 0,
|
||||||
|
"maxValue": "",
|
||||||
|
"minValue": "",
|
||||||
|
"maxScaleValue": "",
|
||||||
|
"minScaleValue": "",
|
||||||
|
"gain": "",
|
||||||
|
"offset": ""
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "pond_1_lo_alm",
|
||||||
|
"ctrlName": "plcpond",
|
||||||
|
"group": "default",
|
||||||
|
"uploadType": "periodic",
|
||||||
|
"dataType": "BIT",
|
||||||
|
"addr": "Pond_1_Lo_Alarm",
|
||||||
|
"decimal": 2,
|
||||||
|
"len": 1,
|
||||||
|
"readWrite": "ro",
|
||||||
|
"unit": "",
|
||||||
|
"desc": "",
|
||||||
|
"transformType": 0,
|
||||||
|
"maxValue": "",
|
||||||
|
"minValue": "",
|
||||||
|
"maxScaleValue": "",
|
||||||
|
"minScaleValue": "",
|
||||||
|
"gain": "",
|
||||||
|
"offset": ""
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "pond_1_lo_spt",
|
||||||
|
"ctrlName": "plcpond",
|
||||||
|
"group": "default",
|
||||||
|
"uploadType": "periodic",
|
||||||
|
"dataType": "FLOAT",
|
||||||
|
"addr": "Pond_1_Lo_Setpoint",
|
||||||
|
"decimal": 2,
|
||||||
|
"len": 1,
|
||||||
|
"readWrite": "rw",
|
||||||
|
"unit": "",
|
||||||
|
"desc": "",
|
||||||
|
"transformType": 0,
|
||||||
|
"maxValue": "",
|
||||||
|
"minValue": "",
|
||||||
|
"maxScaleValue": "",
|
||||||
|
"minScaleValue": "",
|
||||||
|
"gain": "",
|
||||||
|
"offset": ""
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "pond_1_lo_clr_spt",
|
||||||
|
"ctrlName": "plcpond",
|
||||||
|
"group": "default",
|
||||||
|
"uploadType": "periodic",
|
||||||
|
"dataType": "FLOAT",
|
||||||
|
"addr": "Pond_1_Lo_Clr_Setpoint",
|
||||||
|
"decimal": 2,
|
||||||
|
"len": 1,
|
||||||
|
"readWrite": "rw",
|
||||||
|
"unit": "",
|
||||||
|
"desc": "",
|
||||||
|
"transformType": 0,
|
||||||
|
"maxValue": "",
|
||||||
|
"minValue": "",
|
||||||
|
"maxScaleValue": "",
|
||||||
|
"minScaleValue": "",
|
||||||
|
"gain": "",
|
||||||
|
"offset": ""
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "pond_2_level",
|
||||||
|
"ctrlName": "plcpond",
|
||||||
|
"group": "default",
|
||||||
|
"uploadType": "periodic",
|
||||||
|
"dataType": "FLOAT",
|
||||||
|
"addr": "Pond_2_Lev",
|
||||||
|
"decimal": 2,
|
||||||
|
"len": 1,
|
||||||
|
"readWrite": "ro",
|
||||||
|
"unit": "",
|
||||||
|
"desc": "",
|
||||||
|
"transformType": 0,
|
||||||
|
"maxValue": "",
|
||||||
|
"minValue": "",
|
||||||
|
"maxScaleValue": "",
|
||||||
|
"minScaleValue": "",
|
||||||
|
"gain": "",
|
||||||
|
"offset": ""
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "pond_2_total_bbls",
|
||||||
|
"ctrlName": "plcpond",
|
||||||
|
"group": "default",
|
||||||
|
"uploadType": "periodic",
|
||||||
|
"dataType": "FLOAT",
|
||||||
|
"addr": "Pond_2_Total_Barrels",
|
||||||
|
"decimal": 2,
|
||||||
|
"len": 1,
|
||||||
|
"readWrite": "ro",
|
||||||
|
"unit": "",
|
||||||
|
"desc": "",
|
||||||
|
"transformType": 0,
|
||||||
|
"maxValue": "",
|
||||||
|
"minValue": "",
|
||||||
|
"maxScaleValue": "",
|
||||||
|
"minScaleValue": "",
|
||||||
|
"gain": "",
|
||||||
|
"offset": ""
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "pond_2_hi_alm",
|
||||||
|
"ctrlName": "plcpond",
|
||||||
|
"group": "default",
|
||||||
|
"uploadType": "periodic",
|
||||||
|
"dataType": "BIT",
|
||||||
|
"addr": "Pond_2_Hi_Alarm",
|
||||||
|
"decimal": 2,
|
||||||
|
"len": 1,
|
||||||
|
"readWrite": "ro",
|
||||||
|
"unit": "",
|
||||||
|
"desc": "",
|
||||||
|
"transformType": 0,
|
||||||
|
"maxValue": "",
|
||||||
|
"minValue": "",
|
||||||
|
"maxScaleValue": "",
|
||||||
|
"minScaleValue": "",
|
||||||
|
"gain": "",
|
||||||
|
"offset": ""
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "pond_2_hi_spt",
|
||||||
|
"ctrlName": "plcpond",
|
||||||
|
"group": "default",
|
||||||
|
"uploadType": "periodic",
|
||||||
|
"dataType": "FLOAT",
|
||||||
|
"addr": "Pond_2_Hi_Setpoint",
|
||||||
|
"decimal": 2,
|
||||||
|
"len": 1,
|
||||||
|
"readWrite": "rw",
|
||||||
|
"unit": "",
|
||||||
|
"desc": "",
|
||||||
|
"transformType": 0,
|
||||||
|
"maxValue": "",
|
||||||
|
"minValue": "",
|
||||||
|
"maxScaleValue": "",
|
||||||
|
"minScaleValue": "",
|
||||||
|
"gain": "",
|
||||||
|
"offset": ""
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "pond_2_hi_clr_spt",
|
||||||
|
"ctrlName": "plcpond",
|
||||||
|
"group": "default",
|
||||||
|
"uploadType": "periodic",
|
||||||
|
"dataType": "FLOAT",
|
||||||
|
"addr": "Pond_2_Hi_Clr_Setpoint",
|
||||||
|
"decimal": 2,
|
||||||
|
"len": 1,
|
||||||
|
"readWrite": "rw",
|
||||||
|
"unit": "",
|
||||||
|
"desc": "",
|
||||||
|
"transformType": 0,
|
||||||
|
"maxValue": "",
|
||||||
|
"minValue": "",
|
||||||
|
"maxScaleValue": "",
|
||||||
|
"minScaleValue": "",
|
||||||
|
"gain": "1.0",
|
||||||
|
"offset": "0.0"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "pond_2_lo_alm",
|
||||||
|
"ctrlName": "plcpond",
|
||||||
|
"group": "default",
|
||||||
|
"uploadType": "periodic",
|
||||||
|
"dataType": "BIT",
|
||||||
|
"addr": "Pond_2_Lo_Alarm",
|
||||||
|
"decimal": 2,
|
||||||
|
"len": 1,
|
||||||
|
"readWrite": "ro",
|
||||||
|
"unit": "",
|
||||||
|
"desc": "",
|
||||||
|
"transformType": 0,
|
||||||
|
"maxValue": "",
|
||||||
|
"minValue": "",
|
||||||
|
"maxScaleValue": "",
|
||||||
|
"minScaleValue": "",
|
||||||
|
"gain": "",
|
||||||
|
"offset": ""
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "pond_2_lo_spt",
|
||||||
|
"ctrlName": "plcpond",
|
||||||
|
"group": "default",
|
||||||
|
"uploadType": "periodic",
|
||||||
|
"dataType": "FLOAT",
|
||||||
|
"addr": "Pond_2_Lo_Setpoint",
|
||||||
|
"decimal": 2,
|
||||||
|
"len": 1,
|
||||||
|
"readWrite": "rw",
|
||||||
|
"unit": "",
|
||||||
|
"desc": "",
|
||||||
|
"transformType": 0,
|
||||||
|
"maxValue": "",
|
||||||
|
"minValue": "",
|
||||||
|
"maxScaleValue": "",
|
||||||
|
"minScaleValue": "",
|
||||||
|
"gain": "",
|
||||||
|
"offset": ""
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "pond_2_lo_clr_spt",
|
||||||
|
"ctrlName": "plcpond",
|
||||||
|
"group": "default",
|
||||||
|
"uploadType": "periodic",
|
||||||
|
"dataType": "FLOAT",
|
||||||
|
"addr": "Pond_2_Lo_Clr_Setpoint",
|
||||||
|
"decimal": 2,
|
||||||
|
"len": 1,
|
||||||
|
"readWrite": "rw",
|
||||||
|
"unit": "",
|
||||||
|
"desc": "",
|
||||||
|
"transformType": 0,
|
||||||
|
"maxValue": "",
|
||||||
|
"minValue": "",
|
||||||
|
"maxScaleValue": "",
|
||||||
|
"minScaleValue": "",
|
||||||
|
"gain": "",
|
||||||
|
"offset": ""
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"alarmLables": [
|
||||||
|
"default"
|
||||||
|
],
|
||||||
|
"alarms": [],
|
||||||
|
"groups": [
|
||||||
|
{
|
||||||
|
"name": "default",
|
||||||
|
"uploadInterval": 600,
|
||||||
|
"reference": 16
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"misc": {
|
||||||
|
"maxAlarmRecordSz": 2000,
|
||||||
|
"logLvl": "INFO",
|
||||||
|
"coms": [
|
||||||
|
{
|
||||||
|
"name": "rs232",
|
||||||
|
"baud": 9600,
|
||||||
|
"bits": 8,
|
||||||
|
"stopbits": 1,
|
||||||
|
"parityChk": "n"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "rs485",
|
||||||
|
"baud": 9600,
|
||||||
|
"bits": 8,
|
||||||
|
"stopbits": 1,
|
||||||
|
"parityChk": "n"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"clouds": [
|
||||||
|
{
|
||||||
|
"cacheSize": 100,
|
||||||
|
"enable": 1,
|
||||||
|
"name": "default",
|
||||||
|
"type": "Standard MQTT",
|
||||||
|
"args": {
|
||||||
|
"host": "hp.henrypump.cloud",
|
||||||
|
"port": 1883,
|
||||||
|
"clientId": "hp",
|
||||||
|
"auth": 1,
|
||||||
|
"tls": 0,
|
||||||
|
"cleanSession": 0,
|
||||||
|
"mqttVersion": "v3.1.1",
|
||||||
|
"keepalive": 60,
|
||||||
|
"key": "",
|
||||||
|
"cert": "",
|
||||||
|
"rootCA": "",
|
||||||
|
"verifyServer": 0,
|
||||||
|
"verifyClient": 0,
|
||||||
|
"username": "hp",
|
||||||
|
"passwd": "hp",
|
||||||
|
"authType": 1
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"quickfaas": {
|
||||||
|
"genericFuncs": [],
|
||||||
|
"uploadFuncs": [
|
||||||
|
{
|
||||||
|
"name": "sendData",
|
||||||
|
"trigger": "measure_event",
|
||||||
|
"topic": "v1/devices/me/telemetry",
|
||||||
|
"qos": 1,
|
||||||
|
"groups": [
|
||||||
|
"default"
|
||||||
|
],
|
||||||
|
"funcName": "sendData",
|
||||||
|
"script": "# Enter your python code.\nimport json\nimport time\nfrom common.Logger import logger\nfrom quickfaas.remotebus import publish\n\n\ndef sendData(message):\n payload = {}\n payload[\"ts\"] = round(time.time() * 1000)\n payload[\"values\"] = {}\n for measure in message[\"measures\"]:\n try:\n logger.debug(measure)\n payload[\"values\"][measure[\"name\"]] = measure[\"value\"]\n except Exception as e:\n logger.error(e)\n publish(__topic__, json.dumps(payload), __qos__)",
|
||||||
|
"msgType": 0,
|
||||||
|
"cloudName": "default"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"downloadFuncs": []
|
||||||
|
},
|
||||||
|
"labels": [
|
||||||
|
{
|
||||||
|
"key": "SN",
|
||||||
|
"value": "GF5022223016120"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"key": "MAC",
|
||||||
|
"value": "00:18:05:21:b2:8a"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"modbusSlave": {
|
||||||
|
"enable": 0,
|
||||||
|
"protocol": "Modbus-TCP",
|
||||||
|
"port": 502,
|
||||||
|
"slaveAddr": 1,
|
||||||
|
"int16Ord": "ab",
|
||||||
|
"int32Ord": "abcd",
|
||||||
|
"float32Ord": "abcd",
|
||||||
|
"maxConnection": 5,
|
||||||
|
"mapping_table": []
|
||||||
|
},
|
||||||
|
"modbusRTUSlave": {
|
||||||
|
"enable": 0,
|
||||||
|
"protocol": "Modbus-RTU",
|
||||||
|
"coms": "rs485",
|
||||||
|
"slaveAddr": 1,
|
||||||
|
"int16Ord": "ab",
|
||||||
|
"int32Ord": "abcd",
|
||||||
|
"float32Ord": "abcd",
|
||||||
|
"mapping_table": []
|
||||||
|
},
|
||||||
|
"iec104Server": {
|
||||||
|
"enable": 0,
|
||||||
|
"cotSize": 2,
|
||||||
|
"port": 2404,
|
||||||
|
"serverList": [
|
||||||
|
{
|
||||||
|
"asduAddr": 1
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"kValue": 12,
|
||||||
|
"wValue": 8,
|
||||||
|
"t0": 30,
|
||||||
|
"t1": 15,
|
||||||
|
"t2": 10,
|
||||||
|
"t3": 20,
|
||||||
|
"maximumLink": 5,
|
||||||
|
"timeSet": 1,
|
||||||
|
"byteOrder": "abcd",
|
||||||
|
"mapping_table": []
|
||||||
|
},
|
||||||
|
"iec104Client": {
|
||||||
|
"enable": 0,
|
||||||
|
"connectType": 2,
|
||||||
|
"serverAddr": "ipower.inhandcloud.cn",
|
||||||
|
"serverPort": 2404,
|
||||||
|
"communicationCode": "",
|
||||||
|
"protocol": 1,
|
||||||
|
"asduAddr": 1,
|
||||||
|
"tls": 0,
|
||||||
|
"mapping_table": {
|
||||||
|
"YX": [],
|
||||||
|
"YC": [],
|
||||||
|
"YK": []
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"opcuaServer": {
|
||||||
|
"enable": 0,
|
||||||
|
"port": 4840,
|
||||||
|
"maximumLink": 5,
|
||||||
|
"securityMode": 0,
|
||||||
|
"identifierType": "String",
|
||||||
|
"mapping_table": []
|
||||||
|
},
|
||||||
|
"southMetadata": {},
|
||||||
|
"bindMetadata": {
|
||||||
|
"version": "",
|
||||||
|
"timestamp": ""
|
||||||
|
},
|
||||||
|
"bindConfig": {
|
||||||
|
"enable": 0,
|
||||||
|
"bind": {
|
||||||
|
"modelId": "",
|
||||||
|
"modelName": "",
|
||||||
|
"srcId": "",
|
||||||
|
"srcName": "",
|
||||||
|
"devId": "",
|
||||||
|
"devName": ""
|
||||||
|
},
|
||||||
|
"varGroups": [],
|
||||||
|
"variables": [],
|
||||||
|
"alerts": []
|
||||||
|
},
|
||||||
|
"version": "2.3.1"
|
||||||
|
}
|
||||||
136
Pub_Sub/plcpond/thingsboard/v1/pub/sendData.py
Normal file
136
Pub_Sub/plcpond/thingsboard/v1/pub/sendData.py
Normal file
@@ -0,0 +1,136 @@
|
|||||||
|
# Enter your python code.
|
||||||
|
import json, os
|
||||||
|
from datetime import datetime as dt
|
||||||
|
from common.Logger import logger
|
||||||
|
from quickfaas.remotebus import publish
|
||||||
|
from quickfaas.global_dict import get as get_params
|
||||||
|
from quickfaas.global_dict import _set_global_args
|
||||||
|
|
||||||
|
def reboot():
|
||||||
|
#basic = Basic()
|
||||||
|
logger.info("!" * 10 + "REBOOTING DEVICE" + "!"*10)
|
||||||
|
r = os.popen("kill -s SIGHUP `cat /var/run/python/supervisord.pid`").read()
|
||||||
|
logger.info(f"REBOOT : {r}")
|
||||||
|
|
||||||
|
def checkFileExist(filename):
|
||||||
|
path = "/var/user/files"
|
||||||
|
if not os.path.exists(path):
|
||||||
|
logger.info("no folder making files folder in var/user")
|
||||||
|
os.makedirs(path)
|
||||||
|
with open(path + "/" + filename, "a") as f:
|
||||||
|
json.dump({}, f)
|
||||||
|
if not os.path.exists(path + "/" + filename):
|
||||||
|
logger.info("no creds file making creds file")
|
||||||
|
with open(path + "/" + filename, "a") as f:
|
||||||
|
json.dump({}, f)
|
||||||
|
|
||||||
|
def convertDStoJSON(ds):
|
||||||
|
j = dict()
|
||||||
|
for x in ds:
|
||||||
|
j[x["key"]] = x["value"]
|
||||||
|
return j
|
||||||
|
|
||||||
|
def convertJSONtoDS(j):
|
||||||
|
d = []
|
||||||
|
for key in j.keys():
|
||||||
|
d.append({"key": key, "value": j[key]})
|
||||||
|
return d
|
||||||
|
|
||||||
|
def checkCredentialConfig():
|
||||||
|
logger.info("CHECKING CONFIG")
|
||||||
|
cfgpath = "/var/user/cfg/device_supervisor/device_supervisor.cfg"
|
||||||
|
credspath = "/var/user/files/creds.json"
|
||||||
|
cfg = dict()
|
||||||
|
with open(cfgpath, "r") as f:
|
||||||
|
cfg = json.load(f)
|
||||||
|
clouds = cfg.get("clouds")
|
||||||
|
logger.info(clouds)
|
||||||
|
#if not configured then try to configure from stored values
|
||||||
|
if clouds[0]["args"]["clientId"] == "unknown" or clouds[0]["args"]["username"] == "unknown" or not clouds[0]["args"]["passwd"] or clouds[0]["args"]["passwd"] == "unknown":
|
||||||
|
checkFileExist("creds.json")
|
||||||
|
with open(credspath, "r") as c:
|
||||||
|
creds = json.load(c)
|
||||||
|
if creds:
|
||||||
|
logger.info("updating config with stored data")
|
||||||
|
clouds[0]["args"]["clientId"] = creds["clientId"]
|
||||||
|
clouds[0]["args"]["username"] = creds["userName"]
|
||||||
|
clouds[0]["args"]["passwd"] = creds["password"]
|
||||||
|
cfg["clouds"] = clouds
|
||||||
|
cfg = checkParameterConfig(cfg)
|
||||||
|
with open(cfgpath, "w", encoding='utf-8') as n:
|
||||||
|
json.dump(cfg, n, indent=1, ensure_ascii=False)
|
||||||
|
reboot()
|
||||||
|
else:
|
||||||
|
#assuming clouds is filled out, if data is different then assume someone typed in something new and store it, if creds is empty fill with clouds' data
|
||||||
|
checkFileExist("creds.json")
|
||||||
|
with open(credspath, "r") as c:
|
||||||
|
logger.info("updating stored file with new data")
|
||||||
|
cfg = checkParameterConfig(cfg)
|
||||||
|
with open(cfgpath, "w", encoding='utf-8') as n:
|
||||||
|
json.dump(cfg, n, indent=1, ensure_ascii=False)
|
||||||
|
creds = json.load(c)
|
||||||
|
if creds:
|
||||||
|
if creds["clientId"] != clouds[0]["args"]["clientId"]:
|
||||||
|
creds["clientId"] = clouds[0]["args"]["clientId"]
|
||||||
|
if creds["userName"] != clouds[0]["args"]["username"]:
|
||||||
|
creds["userName"] = clouds[0]["args"]["username"]
|
||||||
|
if creds["password"] != clouds[0]["args"]["passwd"]:
|
||||||
|
creds["password"] = clouds[0]["args"]["passwd"]
|
||||||
|
else:
|
||||||
|
creds["clientId"] = clouds[0]["args"]["clientId"]
|
||||||
|
creds["userName"] = clouds[0]["args"]["username"]
|
||||||
|
creds["password"] = clouds[0]["args"]["passwd"]
|
||||||
|
with open(credspath, "w") as cw:
|
||||||
|
json.dump(creds,cw)
|
||||||
|
|
||||||
|
def checkParameterConfig(cfg):
|
||||||
|
logger.info("Checking Parameters!!!!")
|
||||||
|
paramspath = "/var/user/files/params.json"
|
||||||
|
cfgparams = convertDStoJSON(cfg.get("labels"))
|
||||||
|
#check stored values
|
||||||
|
checkFileExist("params.json")
|
||||||
|
with open(paramspath, "r") as f:
|
||||||
|
logger.info("Opened param storage file")
|
||||||
|
params = json.load(f)
|
||||||
|
if params:
|
||||||
|
if cfgparams != params:
|
||||||
|
#go through each param
|
||||||
|
#if not "unknown" and cfg and params aren't the same take from cfg likely updated manually
|
||||||
|
#if key in cfg but not in params copy to params
|
||||||
|
logger.info("equalizing params between cfg and stored")
|
||||||
|
for key in cfgparams.keys():
|
||||||
|
try:
|
||||||
|
if cfgparams[key] != params[key] and cfgparams[key] != "unknown":
|
||||||
|
params[key] = cfgparams[key]
|
||||||
|
except:
|
||||||
|
params[key] = cfgparams[key]
|
||||||
|
cfg["labels"] = convertJSONtoDS(params)
|
||||||
|
_set_global_args(convertJSONtoDS(params))
|
||||||
|
with open(paramspath, "w") as p:
|
||||||
|
json.dump(params, p)
|
||||||
|
else:
|
||||||
|
with open(paramspath, "w") as p:
|
||||||
|
logger.info("initializing param file with params in memory")
|
||||||
|
json.dump(convertDStoJSON(get_params()), p)
|
||||||
|
cfg["labels"] = get_params()
|
||||||
|
|
||||||
|
return cfg
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
def sendData(message):
|
||||||
|
payload = {}
|
||||||
|
payload["ts"] = (round(dt.timestamp(dt.now())/600)*600)*1000
|
||||||
|
payload["values"] = {}
|
||||||
|
try:
|
||||||
|
checkCredentialConfig()
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(e)
|
||||||
|
for measure in message["measures"]:
|
||||||
|
try:
|
||||||
|
logger.debug(measure)
|
||||||
|
payload["values"][measure["name"]] = measure["value"]
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(e)
|
||||||
|
publish(__topic__, json.dumps(payload), __qos__)
|
||||||
504
Pub_Sub/plcpond/thingsboard/v2/plcpond_tb_v2.cfg
Normal file
504
Pub_Sub/plcpond/thingsboard/v2/plcpond_tb_v2.cfg
Normal file
File diff suppressed because one or more lines are too long
136
Pub_Sub/plcpond/thingsboard/v2/pub/sendData.py
Normal file
136
Pub_Sub/plcpond/thingsboard/v2/pub/sendData.py
Normal file
@@ -0,0 +1,136 @@
|
|||||||
|
# Enter your python code.
|
||||||
|
import json, os
|
||||||
|
from datetime import datetime as dt
|
||||||
|
from common.Logger import logger
|
||||||
|
from quickfaas.remotebus import publish
|
||||||
|
from quickfaas.global_dict import get as get_params
|
||||||
|
from quickfaas.global_dict import _set_global_args
|
||||||
|
|
||||||
|
def reboot():
|
||||||
|
#basic = Basic()
|
||||||
|
logger.info("!" * 10 + "REBOOTING DEVICE" + "!"*10)
|
||||||
|
r = os.popen("kill -s SIGHUP `cat /var/run/python/supervisord.pid`").read()
|
||||||
|
logger.info(f"REBOOT : {r}")
|
||||||
|
|
||||||
|
def checkFileExist(filename):
|
||||||
|
path = "/var/user/files"
|
||||||
|
if not os.path.exists(path):
|
||||||
|
logger.info("no folder making files folder in var/user")
|
||||||
|
os.makedirs(path)
|
||||||
|
with open(path + "/" + filename, "a") as f:
|
||||||
|
json.dump({}, f)
|
||||||
|
if not os.path.exists(path + "/" + filename):
|
||||||
|
logger.info("no creds file making creds file")
|
||||||
|
with open(path + "/" + filename, "a") as f:
|
||||||
|
json.dump({}, f)
|
||||||
|
|
||||||
|
def convertDStoJSON(ds):
|
||||||
|
j = dict()
|
||||||
|
for x in ds:
|
||||||
|
j[x["key"]] = x["value"]
|
||||||
|
return j
|
||||||
|
|
||||||
|
def convertJSONtoDS(j):
|
||||||
|
d = []
|
||||||
|
for key in j.keys():
|
||||||
|
d.append({"key": key, "value": j[key]})
|
||||||
|
return d
|
||||||
|
|
||||||
|
def checkCredentialConfig():
|
||||||
|
logger.info("CHECKING CONFIG")
|
||||||
|
cfgpath = "/var/user/cfg/device_supervisor/device_supervisor.cfg"
|
||||||
|
credspath = "/var/user/files/creds.json"
|
||||||
|
cfg = dict()
|
||||||
|
with open(cfgpath, "r") as f:
|
||||||
|
cfg = json.load(f)
|
||||||
|
clouds = cfg.get("clouds")
|
||||||
|
logger.info(clouds)
|
||||||
|
#if not configured then try to configure from stored values
|
||||||
|
if clouds[0]["args"]["clientId"] == "unknown" or clouds[0]["args"]["username"] == "unknown" or not clouds[0]["args"]["passwd"] or clouds[0]["args"]["passwd"] == "unknown":
|
||||||
|
checkFileExist("creds.json")
|
||||||
|
with open(credspath, "r") as c:
|
||||||
|
creds = json.load(c)
|
||||||
|
if creds:
|
||||||
|
logger.info("updating config with stored data")
|
||||||
|
clouds[0]["args"]["clientId"] = creds["clientId"]
|
||||||
|
clouds[0]["args"]["username"] = creds["userName"]
|
||||||
|
clouds[0]["args"]["passwd"] = creds["password"]
|
||||||
|
cfg["clouds"] = clouds
|
||||||
|
cfg = checkParameterConfig(cfg)
|
||||||
|
with open(cfgpath, "w", encoding='utf-8') as n:
|
||||||
|
json.dump(cfg, n, indent=1, ensure_ascii=False)
|
||||||
|
reboot()
|
||||||
|
else:
|
||||||
|
#assuming clouds is filled out, if data is different then assume someone typed in something new and store it, if creds is empty fill with clouds' data
|
||||||
|
checkFileExist("creds.json")
|
||||||
|
with open(credspath, "r") as c:
|
||||||
|
logger.info("updating stored file with new data")
|
||||||
|
cfg = checkParameterConfig(cfg)
|
||||||
|
with open(cfgpath, "w", encoding='utf-8') as n:
|
||||||
|
json.dump(cfg, n, indent=1, ensure_ascii=False)
|
||||||
|
creds = json.load(c)
|
||||||
|
if creds:
|
||||||
|
if creds["clientId"] != clouds[0]["args"]["clientId"]:
|
||||||
|
creds["clientId"] = clouds[0]["args"]["clientId"]
|
||||||
|
if creds["userName"] != clouds[0]["args"]["username"]:
|
||||||
|
creds["userName"] = clouds[0]["args"]["username"]
|
||||||
|
if creds["password"] != clouds[0]["args"]["passwd"]:
|
||||||
|
creds["password"] = clouds[0]["args"]["passwd"]
|
||||||
|
else:
|
||||||
|
creds["clientId"] = clouds[0]["args"]["clientId"]
|
||||||
|
creds["userName"] = clouds[0]["args"]["username"]
|
||||||
|
creds["password"] = clouds[0]["args"]["passwd"]
|
||||||
|
with open(credspath, "w") as cw:
|
||||||
|
json.dump(creds,cw)
|
||||||
|
|
||||||
|
def checkParameterConfig(cfg):
|
||||||
|
logger.info("Checking Parameters!!!!")
|
||||||
|
paramspath = "/var/user/files/params.json"
|
||||||
|
cfgparams = convertDStoJSON(cfg.get("labels"))
|
||||||
|
#check stored values
|
||||||
|
checkFileExist("params.json")
|
||||||
|
with open(paramspath, "r") as f:
|
||||||
|
logger.info("Opened param storage file")
|
||||||
|
params = json.load(f)
|
||||||
|
if params:
|
||||||
|
if cfgparams != params:
|
||||||
|
#go through each param
|
||||||
|
#if not "unknown" and cfg and params aren't the same take from cfg likely updated manually
|
||||||
|
#if key in cfg but not in params copy to params
|
||||||
|
logger.info("equalizing params between cfg and stored")
|
||||||
|
for key in cfgparams.keys():
|
||||||
|
try:
|
||||||
|
if cfgparams[key] != params[key] and cfgparams[key] != "unknown":
|
||||||
|
params[key] = cfgparams[key]
|
||||||
|
except:
|
||||||
|
params[key] = cfgparams[key]
|
||||||
|
cfg["labels"] = convertJSONtoDS(params)
|
||||||
|
_set_global_args(convertJSONtoDS(params))
|
||||||
|
with open(paramspath, "w") as p:
|
||||||
|
json.dump(params, p)
|
||||||
|
else:
|
||||||
|
with open(paramspath, "w") as p:
|
||||||
|
logger.info("initializing param file with params in memory")
|
||||||
|
json.dump(convertDStoJSON(get_params()), p)
|
||||||
|
cfg["labels"] = get_params()
|
||||||
|
|
||||||
|
return cfg
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
def sendData(message):
|
||||||
|
payload = {}
|
||||||
|
payload["ts"] = (round(dt.timestamp(dt.now())/600)*600)*1000
|
||||||
|
payload["values"] = {}
|
||||||
|
try:
|
||||||
|
checkCredentialConfig()
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(e)
|
||||||
|
for measure in message["measures"]:
|
||||||
|
try:
|
||||||
|
logger.debug(measure)
|
||||||
|
payload["values"][measure["name"]] = measure["value"]
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(e)
|
||||||
|
publish(__topic__, json.dumps(payload), __qos__)
|
||||||
35
Pub_Sub/receiveAttributeResponse.py
Normal file
35
Pub_Sub/receiveAttributeResponse.py
Normal file
@@ -0,0 +1,35 @@
|
|||||||
|
import json
|
||||||
|
from common.Logger import logger
|
||||||
|
from quickfaas.global_dict import get as get_params
|
||||||
|
from quickfaas.global_dict import _set_global_args
|
||||||
|
|
||||||
|
#v1/devices/me/attributes/response/+
|
||||||
|
def receiveAttributeResponse(topic, payload):
|
||||||
|
#All attributes were requested handle response
|
||||||
|
payload = json.loads(payload)
|
||||||
|
logger.info(topic)
|
||||||
|
logger.info(payload)
|
||||||
|
logger.info(payload["shared"].get("test"))
|
||||||
|
|
||||||
|
|
||||||
|
def updateConfig():
|
||||||
|
#get the stored config file and update the label with the new parameters
|
||||||
|
cfg = dict()
|
||||||
|
with open("/var/user/cfg/device_supervisor/device_supervisor.cfg", "r+") as f:
|
||||||
|
cfg = json.load(f)
|
||||||
|
labels = cfg.get("labels")
|
||||||
|
if labels:
|
||||||
|
labels = convertDStoJSON(labels)
|
||||||
|
|
||||||
|
|
||||||
|
def convertDStoJSON(ds):
|
||||||
|
j = dict()
|
||||||
|
for x in ds:
|
||||||
|
j[x["key"]] = x["value"]
|
||||||
|
return j
|
||||||
|
|
||||||
|
def convertJSONtoDS(j):
|
||||||
|
d = []
|
||||||
|
for key in j.keys():
|
||||||
|
d.append({"key": key, "value": j[key]})
|
||||||
|
return d
|
||||||
9
Pub_Sub/receiveAttributeUpdate.py
Normal file
9
Pub_Sub/receiveAttributeUpdate.py
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
import json
|
||||||
|
from common.Logger import logger
|
||||||
|
|
||||||
|
#v1/devices/me/attributes
|
||||||
|
def receiveAttributeUpdate(topic, payload):
|
||||||
|
#Attribute was updated/added server side needs to be updated/added locally
|
||||||
|
logger.info(topic)
|
||||||
|
logger.info(json.loads(payload))
|
||||||
|
logger.info(json.loads(payload)["shared"].get("test"))
|
||||||
322
Pub_Sub/valvecontroller/thingsboard/v1/valvecontroller_tb_v1.cfg
Normal file
322
Pub_Sub/valvecontroller/thingsboard/v1/valvecontroller_tb_v1.cfg
Normal file
@@ -0,0 +1,322 @@
|
|||||||
|
{
|
||||||
|
"controllers": [
|
||||||
|
{
|
||||||
|
"protocol": "EtherNet/IP",
|
||||||
|
"name": "valvecontroller",
|
||||||
|
"args": {},
|
||||||
|
"samplePeriod": 2,
|
||||||
|
"expired": 10000,
|
||||||
|
"endpoint": "192.168.1.12:44818"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"measures": [
|
||||||
|
{
|
||||||
|
"ctrlName": "valvecontroller",
|
||||||
|
"dataType": "FLOAT",
|
||||||
|
"addr": "Open_Setpoint",
|
||||||
|
"readWrite": "rw",
|
||||||
|
"uploadType": "periodic",
|
||||||
|
"group": "default",
|
||||||
|
"decimal": 1,
|
||||||
|
"name": "open_spt",
|
||||||
|
"desc": "",
|
||||||
|
"unit": "",
|
||||||
|
"transformType": 0,
|
||||||
|
"gain": "1.0",
|
||||||
|
"offset": "0.0",
|
||||||
|
"maxValue": "",
|
||||||
|
"minValue": "",
|
||||||
|
"maxScaleValue": "",
|
||||||
|
"minScaleValue": ""
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"ctrlName": "valvecontroller",
|
||||||
|
"dataType": "FLOAT",
|
||||||
|
"addr": "Close_Setpoint",
|
||||||
|
"readWrite": "rw",
|
||||||
|
"uploadType": "periodic",
|
||||||
|
"group": "default",
|
||||||
|
"decimal": 1,
|
||||||
|
"name": "close_spt",
|
||||||
|
"desc": "",
|
||||||
|
"unit": "",
|
||||||
|
"transformType": 0,
|
||||||
|
"gain": "1.0",
|
||||||
|
"offset": "0.0",
|
||||||
|
"maxValue": "",
|
||||||
|
"minValue": "",
|
||||||
|
"maxScaleValue": "",
|
||||||
|
"minScaleValue": ""
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"ctrlName": "valvecontroller",
|
||||||
|
"dataType": "BIT",
|
||||||
|
"addr": "Open_Valve_Cmd",
|
||||||
|
"readWrite": "rw",
|
||||||
|
"uploadType": "periodic",
|
||||||
|
"group": "default",
|
||||||
|
"name": "open_cmd",
|
||||||
|
"desc": "",
|
||||||
|
"unit": "",
|
||||||
|
"transformType": 0,
|
||||||
|
"gain": "1.0",
|
||||||
|
"offset": "0.0",
|
||||||
|
"maxValue": "",
|
||||||
|
"minValue": "",
|
||||||
|
"maxScaleValue": "",
|
||||||
|
"minScaleValue": "",
|
||||||
|
"decimal": 2
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"ctrlName": "valvecontroller",
|
||||||
|
"dataType": "BIT",
|
||||||
|
"addr": "Close_Valve_Cmd",
|
||||||
|
"readWrite": "rw",
|
||||||
|
"uploadType": "periodic",
|
||||||
|
"group": "default",
|
||||||
|
"name": "close_cmd",
|
||||||
|
"desc": "",
|
||||||
|
"unit": "",
|
||||||
|
"transformType": 0,
|
||||||
|
"gain": "1.0",
|
||||||
|
"offset": "0.0",
|
||||||
|
"maxValue": "",
|
||||||
|
"minValue": "",
|
||||||
|
"maxScaleValue": "",
|
||||||
|
"minScaleValue": "",
|
||||||
|
"decimal": 2
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"ctrlName": "valvecontroller",
|
||||||
|
"dataType": "FLOAT",
|
||||||
|
"addr": "Scaled_Tank_Lev",
|
||||||
|
"readWrite": "ro",
|
||||||
|
"uploadType": "periodic",
|
||||||
|
"group": "default",
|
||||||
|
"decimal": 1,
|
||||||
|
"name": "tank_level",
|
||||||
|
"desc": "",
|
||||||
|
"unit": "",
|
||||||
|
"transformType": 0,
|
||||||
|
"gain": "1.0",
|
||||||
|
"offset": "0.0",
|
||||||
|
"maxValue": "",
|
||||||
|
"minValue": "",
|
||||||
|
"maxScaleValue": "",
|
||||||
|
"minScaleValue": ""
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"ctrlName": "valvecontroller",
|
||||||
|
"dataType": "BIT",
|
||||||
|
"addr": "Valve_Open_Status",
|
||||||
|
"readWrite": "ro",
|
||||||
|
"uploadType": "periodic",
|
||||||
|
"group": "default",
|
||||||
|
"name": "valve_open_status",
|
||||||
|
"desc": "",
|
||||||
|
"unit": "",
|
||||||
|
"transformType": 0,
|
||||||
|
"gain": "1.0",
|
||||||
|
"offset": "0.0",
|
||||||
|
"maxValue": "",
|
||||||
|
"minValue": "",
|
||||||
|
"maxScaleValue": "",
|
||||||
|
"minScaleValue": "",
|
||||||
|
"decimal": 2
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"ctrlName": "valvecontroller",
|
||||||
|
"dataType": "BIT",
|
||||||
|
"addr": "Valve_Closed_Status",
|
||||||
|
"readWrite": "ro",
|
||||||
|
"uploadType": "periodic",
|
||||||
|
"group": "default",
|
||||||
|
"name": "valve_close_status",
|
||||||
|
"desc": "",
|
||||||
|
"unit": "",
|
||||||
|
"transformType": 0,
|
||||||
|
"gain": "1.0",
|
||||||
|
"offset": "0.0",
|
||||||
|
"maxValue": "",
|
||||||
|
"minValue": "",
|
||||||
|
"maxScaleValue": "",
|
||||||
|
"minScaleValue": "",
|
||||||
|
"decimal": 2
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"ctrlName": "valvecontroller",
|
||||||
|
"dataType": "BIT",
|
||||||
|
"addr": "Valve_Fail",
|
||||||
|
"readWrite": "ro",
|
||||||
|
"uploadType": "periodic",
|
||||||
|
"group": "default",
|
||||||
|
"name": "valve_failure",
|
||||||
|
"desc": "",
|
||||||
|
"unit": "",
|
||||||
|
"transformType": 0,
|
||||||
|
"gain": "1.0",
|
||||||
|
"offset": "0.0",
|
||||||
|
"maxValue": "",
|
||||||
|
"minValue": "",
|
||||||
|
"maxScaleValue": "",
|
||||||
|
"minScaleValue": "",
|
||||||
|
"decimal": 2
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"alarmLables": [
|
||||||
|
"default"
|
||||||
|
],
|
||||||
|
"alarms": [],
|
||||||
|
"groups": [
|
||||||
|
{
|
||||||
|
"name": "default",
|
||||||
|
"uploadInterval": 600,
|
||||||
|
"reference": 8
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"misc": {
|
||||||
|
"maxAlarmRecordSz": 2000,
|
||||||
|
"logLvl": "INFO",
|
||||||
|
"coms": [
|
||||||
|
{
|
||||||
|
"name": "rs232",
|
||||||
|
"baud": 9600,
|
||||||
|
"bits": 8,
|
||||||
|
"stopbits": 1,
|
||||||
|
"parityChk": "n"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "rs485",
|
||||||
|
"baud": 9600,
|
||||||
|
"bits": 8,
|
||||||
|
"stopbits": 1,
|
||||||
|
"parityChk": "n"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"clouds": [
|
||||||
|
{
|
||||||
|
"cacheSize": 10000,
|
||||||
|
"enable": 1,
|
||||||
|
"name": "default",
|
||||||
|
"type": "Standard MQTT",
|
||||||
|
"args": {
|
||||||
|
"host": "thingsboard.cloud",
|
||||||
|
"port": 1883,
|
||||||
|
"clientId": "valve-controller",
|
||||||
|
"auth": 1,
|
||||||
|
"tls": 0,
|
||||||
|
"cleanSession": 0,
|
||||||
|
"mqttVersion": "v3.1.1",
|
||||||
|
"keepalive": 60,
|
||||||
|
"key": "",
|
||||||
|
"cert": "",
|
||||||
|
"rootCA": "",
|
||||||
|
"verifyServer": 0,
|
||||||
|
"verifyClient": 0,
|
||||||
|
"username": "faskensmqtt",
|
||||||
|
"passwd": "faskensmqtt@1903",
|
||||||
|
"authType": 1
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"quickfaas": {
|
||||||
|
"genericFuncs": [],
|
||||||
|
"uploadFuncs": [
|
||||||
|
{
|
||||||
|
"qos": 1,
|
||||||
|
"funcName": "sendData",
|
||||||
|
"script": "# Enter your python code.\nimport json\nimport time\nfrom common.Logger import logger\nfrom quickfaas.remotebus import publish\n\ndef sendData(message):\n logger.debug(message)\n #publish(__topic__, json.dumps(message), __qos__)\n try:\n payload = {\"ts\": int(time.time()*1000), \"values\": {}}\n valve_open = 0\n valve_close = 0\n for measure in message[\"measures\"]:\n if measure[\"name\"] in [\"valve_failure\"]:\n value = convert_int(measure[\"name\"], measure[\"value\"])\n payload[\"values\"][measure[\"name\"]] = value\n elif measure[\"name\"] == \"valve_open_status\":\n valve_open = measure[\"value\"]\n elif measure[\"name\"] == \"valve_close_status\":\n valve_close = measure[\"value\"]\n elif \"_cmd\" in measure[\"name\"]:\n pass\n else:\n payload[\"values\"][measure[\"name\"]] = measure[\"value\"]\n if valve_open:\n payload[\"values\"][\"valve_status\"] = 1\n elif valve_close:\n payload[\"values\"][\"valve_status\"] = 0\n else:\n payload[\"values\"][\"valve_status\"] = -1\n publish(__topic__, json.dumps(payload), __qos__)\n except Exception as e:\n logger.error(e)\n\n\ndef convert_int(name, value):\n valve_failure = {\n 0: \"OK\",\n 1: \"Failure\"\n }\n\n names = {\n \"valve_failure\": valve_failure.get(value, \"Invalid Code\")\n }\n return names.get(name, \"Invalid Name\")",
|
||||||
|
"name": "sendData",
|
||||||
|
"trigger": "measure_event",
|
||||||
|
"topic": "v1/devices/me/telemetry",
|
||||||
|
"cloudName": "default",
|
||||||
|
"groups": [
|
||||||
|
"default"
|
||||||
|
],
|
||||||
|
"msgType": 0
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"downloadFuncs": [
|
||||||
|
{
|
||||||
|
"name": "Commands",
|
||||||
|
"topic": "v1/devices/me/rpc/request/+",
|
||||||
|
"qos": 1,
|
||||||
|
"funcName": "receiveCommand",
|
||||||
|
"payload_type": "Plaintext",
|
||||||
|
"script": "# Enter your python code.\nimport json\nimport time\nfrom quickfaas.measure import recall\nfrom common.Logger import logger\n\ndef sync(wizard_api):\n #get new values and send\n payload = {}\n topic = \"v1/devices/me/telemetry\"\n try:\n data = recall()#json.loads(recall().decode(\"utf-8\"))\n except Exception as e:\n logger.error(e)\n logger.info(data)\n for controller in data:\n payload = {\"ts\": int(time.time()*1000), \"values\": {}}\n valve_open = 0\n valve_close = 0\n for measure in message[\"measures\"]:\n if measure[\"name\"] in [\"valve_failure\"]:\n value = convert_int(measure[\"name\"], measure[\"value\"])\n payload[\"values\"][measure[\"name\"]] = value\n elif measure[\"name\"] == \"valve_open_status\":\n valve_open = measure[\"value\"]\n elif measure[\"name\"] == \"valve_close_status\":\n valve_close = measure[\"value\"]\n elif \"_cmd\" in measure[\"name\"]:\n pass\n else:\n payload[\"values\"][measure[\"name\"]] = measure[\"value\"]\n if valve_open:\n payload[\"values\"][\"valve_status\"] = \"Open\"\n elif valve_close:\n payload[\"values\"][\"valve_status\"] = \"Closed\"\n else:\n payload[\"values\"][\"valve_status\"] = \"Unknown\"\n logger.debug(\"Sending on topic: {}\".format(topic))\n logger.debug(\"Sending value: {}\".format(payload))\n wizard_api.mqtt_publish(topic, json.dumps(payload))\ndef writeplctag(value, wizard_api):\n try:\n #value = json.loads(value.replace(\"'\",'\"'))\n logger.debug(value)\n message = {\"valvecontroller\":{value[\"measurement\"]: value[\"value\"]}}\n resp = wizard_api.write_plc_values(message)\n #logger.debug(\"RETURN FROM WRITE: {}\".format(resp))\n return True\n except Exception as e:\n logger.debug(e)\n return False\n \ndef receiveCommand(topic, payload, wizard_api):\n try:\n logger.debug(topic)\n logger.debug(json.loads(payload))\n p = json.loads(payload)\n command = p[\"method\"]\n commands = {\n \"sync\": sync,\n \"writeplctag\": writeplctag,\n } \n if command == \"setPLCTag\":\n result = 
commands[\"writeplctag\"](p[\"params\"],wizard_api)\n elif command == \"changeSetpoint\":\n try:\n params_type = {\"measurement\": \"pidcontrolmode\", \"value\": p[\"params\"][\"setpointType\"]}\n if params_type[\"value\"]:\n commands[\"writeplctag\"](params_type, wizard_api)\n time.sleep(2)\n except:\n pass\n try:\n modes = {0: \"flowsetpoint\", 1: \"fluidlevelsetpoint\", 2: \"tubingpressuresetpoint\", 3: \"manualfrequencysetpoint\"}\n params_value = {\"value\": p[\"params\"][\"setpointValue\"]}\n if params_value[\"value\"]:\n params_value[\"measurement\"] = modes[getMode()]\n commands[\"writeplctag\"](params_value, wizard_api)\n except Exception as e:\n logger.debug(\"DID NOT WRITE SETPOINT\")\n logger.debug(e)\n \n #logger.debug(command)\n ack(topic.split(\"/\")[-1], wizard_api)\n time.sleep(5)\n sync(wizard_api)\n except Exception as e:\n logger.debug(e)\n \n\ndef ack(msgid, wizard_api):\n #logger.debug(msgid)\n #logger.debug(mac)\n #logger.debug(name)\n #logger.debug(value)\n wizard_api.mqtt_publish(\"v1/devices/me/rpc/response/\" + str(msgid), json.dumps({\"msg\": {\"time\": time.time()}, \"metadata\": \"\", \"msgType\": \"\"}))\n\ndef getMode():\n try:\n data = recall()\n for controller in data:\n for measure in controller[\"measures\"]:\n if measure[\"name\"] == \"pidcontrolmode\":\n return measure[\"value\"]\n except:\n return None\n\ndef convert_int(name, value):\n valve_failure = {\n 0: \"OK\",\n 1: \"Failure\"\n }\n\n names = {\n \"valve_failure\": valve_failure.get(value, \"Invalid Code\")\n }\n return names.get(name, \"Invalid Name\")\n",
|
||||||
|
"msgType": 0,
|
||||||
|
"cloudName": "default",
|
||||||
|
"trigger": "command_event"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"labels": [
|
||||||
|
{
|
||||||
|
"key": "SN",
|
||||||
|
"value": "GF5022215013070"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"key": "MAC",
|
||||||
|
"value": "00:18:05:1f:8d:4c"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"modbusSlave": {
|
||||||
|
"enable": 0,
|
||||||
|
"protocol": "Modbus-TCP",
|
||||||
|
"port": 502,
|
||||||
|
"slaveAddr": 1,
|
||||||
|
"int16Ord": "ab",
|
||||||
|
"int32Ord": "abcd",
|
||||||
|
"float32Ord": "abcd",
|
||||||
|
"maxConnection": 5,
|
||||||
|
"mapping_table": []
|
||||||
|
},
|
||||||
|
"iec104Server": {
|
||||||
|
"enable": 0,
|
||||||
|
"cotSize": 2,
|
||||||
|
"port": 2404,
|
||||||
|
"serverList": [
|
||||||
|
{
|
||||||
|
"asduAddr": 1
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"kValue": 12,
|
||||||
|
"wValue": 8,
|
||||||
|
"t0": 15,
|
||||||
|
"t1": 15,
|
||||||
|
"t2": 10,
|
||||||
|
"t3": 20,
|
||||||
|
"maximumLink": 5,
|
||||||
|
"timeSet": 1,
|
||||||
|
"byteOrder": "abcd",
|
||||||
|
"mapping_table": []
|
||||||
|
},
|
||||||
|
"opcuaServer": {
|
||||||
|
"enable": 0,
|
||||||
|
"port": 4840,
|
||||||
|
"maximumLink": 5,
|
||||||
|
"securityMode": 0,
|
||||||
|
"identifierType": "String",
|
||||||
|
"mapping_table": []
|
||||||
|
},
|
||||||
|
"southMetadata": {},
|
||||||
|
"bindMetadata": {
|
||||||
|
"version": "",
|
||||||
|
"timestamp": ""
|
||||||
|
},
|
||||||
|
"bindConfig": {
|
||||||
|
"enable": 0,
|
||||||
|
"bind": {
|
||||||
|
"modelId": "",
|
||||||
|
"modelName": "",
|
||||||
|
"srcId": "",
|
||||||
|
"srcName": "",
|
||||||
|
"devId": "",
|
||||||
|
"devName": ""
|
||||||
|
},
|
||||||
|
"varGroups": [],
|
||||||
|
"variables": [],
|
||||||
|
"alerts": []
|
||||||
|
}
|
||||||
|
}
|
||||||
161
Pub_Sub/valvecontroller/thingsboard/v2/pub/sendData.py
Normal file
161
Pub_Sub/valvecontroller/thingsboard/v2/pub/sendData.py
Normal file
@@ -0,0 +1,161 @@
|
|||||||
|
# Enter your python code.
|
||||||
|
import json, os
|
||||||
|
from datetime import datetime as dt
|
||||||
|
from common.Logger import logger
|
||||||
|
from quickfaas.remotebus import publish
|
||||||
|
from quickfaas.global_dict import get as get_params
|
||||||
|
from quickfaas.global_dict import _set_global_args
|
||||||
|
|
||||||
|
def reboot():
    """Restart the device-supervisor runtime by sending SIGHUP to supervisord."""
    banner = "!" * 10
    logger.info(banner + "REBOOTING DEVICE" + banner)
    # supervisord reloads (and restarts its children) on SIGHUP.
    output = os.popen("kill -s SIGHUP `cat /var/run/python/supervisord.pid`").read()
    logger.info(f"REBOOT : {output}")
|
||||||
|
|
||||||
|
def checkFileExist(filename):
    """Ensure /var/user/files exists and contains *filename*.

    Creates the directory if it is missing, then seeds the file with an
    empty JSON object ({}) if it does not exist yet.  Existing files are
    left untouched.
    """
    path = "/var/user/files"
    filepath = path + "/" + filename
    if not os.path.exists(path):
        logger.info("no folder making files folder in var/user")
        # exist_ok guards against a race with another process creating it first
        os.makedirs(path, exist_ok=True)
    # Single creation path: the file is seeded here whether or not the
    # directory had to be created above (the original duplicated this logic).
    if not os.path.exists(filepath):
        logger.info("no creds file making creds file")
        with open(filepath, "w") as f:
            json.dump({}, f)
|
||||||
|
|
||||||
|
def convertDStoJSON(ds):
    """Collapse a list of {"key": ..., "value": ...} records into one dict.

    Later records win when keys repeat.
    """
    return {entry["key"]: entry["value"] for entry in ds}
|
||||||
|
|
||||||
|
def convertJSONtoDS(j):
    """Expand a dict into a list of {"key": ..., "value": ...} records.

    Inverse of convertDStoJSON; preserves the dict's insertion order.
    """
    return [{"key": k, "value": v} for k, v in j.items()]
|
||||||
|
|
||||||
|
def checkCredentialConfig():
    """Keep MQTT credentials in sync between the supervisor config and a backup file.

    If the config's first cloud entry still has placeholder ("unknown") or empty
    credentials, restore them from /var/user/files/creds.json, rewrite the
    config, and trigger a reboot so the new credentials take effect.
    Otherwise, treat the config as authoritative and copy its credentials into
    creds.json.  In both paths checkParameterConfig() is run and the config
    file is rewritten.
    """
    logger.info("CHECKING CONFIG")
    cfgpath = "/var/user/cfg/device_supervisor/device_supervisor.cfg"
    credspath = "/var/user/files/creds.json"
    cfg = dict()
    with open(cfgpath, "r") as f:
        cfg = json.load(f)
    # NOTE(review): assumes "clouds" is present and non-empty — a missing key
    # would raise TypeError/IndexError below; confirm the platform guarantees it.
    clouds = cfg.get("clouds")
    logger.info(clouds)
    #if not configured then try to configure from stored values
    if clouds[0]["args"]["clientId"] == "unknown" or clouds[0]["args"]["username"] == "unknown" or not clouds[0]["args"]["passwd"] or clouds[0]["args"]["passwd"] == "unknown":
        checkFileExist("creds.json")
        with open(credspath, "r") as c:
            creds = json.load(c)
            # Only restore when the backup actually holds something; a freshly
            # seeded creds.json is an empty dict and is skipped.
            if creds:
                logger.info("updating config with stored data")
                clouds[0]["args"]["clientId"] = creds["clientId"]
                clouds[0]["args"]["username"] = creds["userName"]
                clouds[0]["args"]["passwd"] = creds["password"]
                cfg["clouds"] = clouds
                cfg = checkParameterConfig(cfg)
                with open(cfgpath, "w", encoding='utf-8') as n:
                    json.dump(cfg, n, indent=1, ensure_ascii=False)
                # Restart so the supervisor picks up the restored credentials.
                reboot()
    else:
        #assuming clouds is filled out, if data is different then assume someone typed in something new and store it, if creds is empty fill with clouds' data
        checkFileExist("creds.json")
        with open(credspath, "r") as c:
            logger.info("updating stored file with new data")
            # The config file is rewritten before creds.json is read; the read
            # handle `c` is still valid because the write targets cfgpath.
            cfg = checkParameterConfig(cfg)
            with open(cfgpath, "w", encoding='utf-8') as n:
                json.dump(cfg, n, indent=1, ensure_ascii=False)
            creds = json.load(c)
            if creds:
                if creds["clientId"] != clouds[0]["args"]["clientId"]:
                    creds["clientId"] = clouds[0]["args"]["clientId"]
                if creds["userName"] != clouds[0]["args"]["username"]:
                    creds["userName"] = clouds[0]["args"]["username"]
                if creds["password"] != clouds[0]["args"]["passwd"]:
                    creds["password"] = clouds[0]["args"]["passwd"]
            else:
                # Empty backup: seed it from the config wholesale.
                creds["clientId"] = clouds[0]["args"]["clientId"]
                creds["userName"] = clouds[0]["args"]["username"]
                creds["password"] = clouds[0]["args"]["passwd"]
            with open(credspath, "w") as cw:
                json.dump(creds,cw)
|
||||||
|
|
||||||
|
def checkParameterConfig(cfg):
    """Reconcile the "labels" parameters in *cfg* with the copy stored on disk.

    The stored copy lives at /var/user/files/params.json.  When both sides
    have data and differ, a per-key merge runs (config wins unless it holds
    the "unknown" placeholder); the merged set is written back to disk, into
    cfg["labels"], and into the global args.  When the stored file is empty,
    it is initialized from the in-memory parameters.

    Returns the (possibly updated) *cfg* dict.
    """
    logger.info("Checking Parameters!!!!")
    paramspath = "/var/user/files/params.json"
    cfgparams = convertDStoJSON(cfg.get("labels"))
    #check stored values
    checkFileExist("params.json")
    with open(paramspath, "r") as f:
        logger.info("Opened param storage file")
        params = json.load(f)
    if params:
        if cfgparams != params:
            # For each key: a cfg value that differs from the stored one and is
            # not the "unknown" placeholder wins (likely updated manually);
            # keys present in cfg but missing from storage are copied over.
            logger.info("equalizing params between cfg and stored")
            for key in cfgparams.keys():
                try:
                    if cfgparams[key] != params[key] and cfgparams[key] != "unknown":
                        params[key] = cfgparams[key]
                except KeyError:
                    # key exists in cfg but not yet in the stored file
                    params[key] = cfgparams[key]
            cfg["labels"] = convertJSONtoDS(params)
            _set_global_args(convertJSONtoDS(params))
            with open(paramspath, "w") as p:
                json.dump(params, p)
    else:
        # Stored file is an empty dict: initialize it from the parameters
        # currently held in memory by the platform.
        with open(paramspath, "w") as p:
            logger.info("initializing param file with params in memory")
            json.dump(convertDStoJSON(get_params()), p)
        cfg["labels"] = get_params()
    return cfg
|
||||||
|
|
||||||
|
def sendData(message):
    """Publish one telemetry payload built from the controller's measures.

    Credential/config sync runs first (best-effort).  Valve open/close status
    bits are folded into a single numeric "valve_status" (1 open, 0 closed,
    -1 unknown); "_cmd" tags are dropped; "valve_failure" is translated to a
    display string.  Errors are logged, never raised.
    """
    logger.debug(message)
    try:
        checkCredentialConfig()
    except Exception as e:
        logger.error(e)
    try:
        # Timestamp rounded to the nearest 10 minutes, in milliseconds.
        ts_ms = (round(dt.timestamp(dt.now()) / 600) * 600) * 1000
        values = {}
        open_flag = 0
        close_flag = 0
        for m in message["measures"]:
            name = m["name"]
            val = m["value"]
            if name == "valve_failure":
                values[name] = convert_int(name, val)
            elif name == "valve_open_status":
                open_flag = val
            elif name == "valve_close_status":
                close_flag = val
            elif "_cmd" not in name:
                values[name] = val
        values["valve_status"] = 1 if open_flag else (0 if close_flag else -1)
        publish(__topic__, json.dumps({"ts": ts_ms, "values": values}), __qos__)
    except Exception as e:
        logger.error(e)
|
||||||
|
|
||||||
|
|
||||||
|
def convert_int(name, value):
    """Translate an integer status code for *name* into its display string.

    Only "valve_failure" is recognized; unknown names yield "Invalid Name",
    unknown codes yield "Invalid Code".
    """
    if name != "valve_failure":
        return "Invalid Name"
    return {0: "OK", 1: "Failure"}.get(value, "Invalid Code")
|
||||||
104
Pub_Sub/valvecontroller/thingsboard/v2/sub/receiveCommand.py
Normal file
104
Pub_Sub/valvecontroller/thingsboard/v2/sub/receiveCommand.py
Normal file
@@ -0,0 +1,104 @@
|
|||||||
|
import json, time
|
||||||
|
from quickfaas.measure import recall, write
|
||||||
|
from quickfaas.remotebus import publish
|
||||||
|
from common.Logger import logger
|
||||||
|
|
||||||
|
def sync():
    """Read all current tag values and publish one telemetry message per controller.

    Valve open/close bits collapse into a string "valve_status"
    ("Open"/"Closed"/"Unknown"); "_cmd" tags are skipped; "valve_failure"
    is translated via convert_int.  If recall() fails, the error is logged
    and the function returns without publishing (previously `data` was left
    undefined and the next line raised NameError).
    """
    topic = "v1/devices/me/telemetry"
    try:
        data = recall()
    except Exception as e:
        logger.error(e)
        return  # nothing to publish if the tag store is unreachable
    logger.info(data)
    for controller in data:
        payload = {"ts": int(time.time() * 1000), "values": {}}
        valve_open = 0
        valve_close = 0
        for measure in controller["measures"]:
            name = measure["name"]
            if name == "valve_failure":
                payload["values"][name] = convert_int(name, measure["value"])
            elif name == "valve_open_status":
                valve_open = measure["value"]
            elif name == "valve_close_status":
                valve_close = measure["value"]
            elif "_cmd" in name:
                pass  # command tags are write-only; never report them
            else:
                payload["values"][name] = measure["value"]
        if valve_open:
            payload["values"]["valve_status"] = "Open"
        elif valve_close:
            payload["values"]["valve_status"] = "Closed"
        else:
            payload["values"]["valve_status"] = "Unknown"
        logger.debug("Sending on topic: {}".format(topic))
        logger.debug("Sending value: {}".format(payload))
        publish(topic, json.dumps(payload))
|
||||||
|
def writeplctag(value):
    """Write one tag to the PLC; return True on success, False on any error.

    *value* is a dict with "measurement" (tag name) and "value" keys.
    """
    try:
        logger.debug(value)
        tag = {"name": value["measurement"], "value": value["value"]}
        result = write([{"name": "valvecontroller", "measures": [tag]}])
        logger.debug("RETURN FROM WRITE: {}".format(result))
        return True
    except Exception as err:
        logger.debug(err)
        return False
|
||||||
|
|
||||||
|
def receiveCommand(topic, payload):
    """Handle an inbound RPC request from the broker.

    A "setPLCTag" method writes the requested tag; every request is then
    acknowledged and, after a short settle delay, the current values are
    re-published via sync().  All errors are logged, never raised.
    """
    try:
        logger.debug(topic)
        request = json.loads(payload)
        logger.debug(request)
        handlers = {
            "sync": sync,
            "writeplctag": writeplctag,
        }
        if request["method"] == "setPLCTag":
            try:
                outcome = handlers["writeplctag"](request["params"])
                logger.debug(outcome)
            except Exception as e:
                logger.error(e)
        # Ack using the request id (last topic segment), let the PLC settle,
        # then report the fresh values back.
        ack(topic.split("/")[-1])
        time.sleep(5)
        sync()
    except Exception as e:
        logger.debug(e)
|
||||||
|
|
||||||
|
|
||||||
|
def ack(msgid):
    """Publish an RPC acknowledgement for the request identified by *msgid*."""
    response_topic = "v1/devices/me/rpc/response/" + str(msgid)
    body = {"msg": {"time": time.time()}, "metadata": "", "msgType": ""}
    publish(response_topic, json.dumps(body))
|
||||||
|
|
||||||
|
def getMode():
    """Return the current "pidcontrolmode" tag value, or None if unavailable.

    None is returned both when the tag is absent from the recalled data and
    when recall() itself fails.
    """
    try:
        for controller in recall():
            for measure in controller["measures"]:
                if measure["name"] == "pidcontrolmode":
                    return measure["value"]
    except Exception:  # narrowed from a bare `except:`; still best-effort
        pass
    return None  # tag not found (previously an implicit fall-through None)
|
||||||
|
|
||||||
|
def convert_int(name, value):
    """Map an integer status code for *name* to its display string.

    Unknown names yield "Invalid Name"; known names with an unknown code
    yield "Invalid Code".
    """
    lookup = {
        "valve_failure": {0: "OK", 1: "Failure"},
    }
    codes = lookup.get(name)
    if codes is None:
        return "Invalid Name"
    return codes.get(value, "Invalid Code")
|
||||||
322
Pub_Sub/valvecontroller/thingsboard/v2/valvecontroller_tb_v2.cfg
Normal file
322
Pub_Sub/valvecontroller/thingsboard/v2/valvecontroller_tb_v2.cfg
Normal file
File diff suppressed because one or more lines are too long
BIN
device_supervisor-V2.0.1.tar.gz
Normal file
BIN
device_supervisor-V2.0.1.tar.gz
Normal file
Binary file not shown.
BIN
device_supervisor-V2.1.1.tar.gz
Normal file
BIN
device_supervisor-V2.1.1.tar.gz
Normal file
Binary file not shown.
BIN
device_supervisor-V2.3.1.tar.gz
Normal file
BIN
device_supervisor-V2.3.1.tar.gz
Normal file
Binary file not shown.
Reference in New Issue
Block a user