update several drivers
This commit is contained in:
299
Pub_Sub/cameratrailer_mb/thingsboard/cameratrailer_tb_v5.cfg
Normal file
299
Pub_Sub/cameratrailer_mb/thingsboard/cameratrailer_tb_v5.cfg
Normal file
File diff suppressed because one or more lines are too long
161
Pub_Sub/cameratrailer_mb/thingsboard/pub/sendData.py
Normal file
161
Pub_Sub/cameratrailer_mb/thingsboard/pub/sendData.py
Normal file
@@ -0,0 +1,161 @@
|
||||
import json, os
|
||||
from datetime import datetime as dt
|
||||
from common.Logger import logger
|
||||
from quickfaas.remotebus import publish
|
||||
from mobiuspi_lib.gps import GPS
|
||||
from quickfaas.global_dict import get as get_params
|
||||
from quickfaas.global_dict import _set_global_args
|
||||
|
||||
|
||||
def reboot():
    """Force a device-supervisor restart by signalling supervisord.

    Sends SIGHUP to the supervisord process so the whole application
    reloads (used after rewriting the supervisor config on disk).
    """
    banner = "!" * 10
    logger.info(banner + "REBOOTING DEVICE" + banner)
    result = os.popen("kill -s SIGHUP `cat /var/run/python/supervisord.pid`").read()
    logger.info(f"REBOOT : {result}")
|
||||
|
||||
def checkFileExist(filename):
    """Ensure /var/user/files/<filename> exists.

    Creates the folder and/or the file as needed; a newly created file is
    seeded with an empty JSON object ({}) so later json.load calls succeed.

    The original duplicated the file-creation code in both branches; this
    version creates the folder first and then handles the file once.
    """
    path = "/var/user/files"
    filepath = os.path.join(path, filename)
    if not os.path.exists(path):
        logger.debug("no folder making files folder in var/user")
        os.makedirs(path)
    if not os.path.exists(filepath):
        logger.debug("no creds file making creds file")
        with open(filepath, "a") as f:
            json.dump({}, f)
|
||||
|
||||
def convertDStoJSON(ds):
    """Collapse a device-supervisor label list ([{"key": ..., "value": ...}, ...])
    into a plain {key: value} dict. Later duplicate keys win."""
    return {entry["key"]: entry["value"] for entry in ds}
|
||||
|
||||
def convertJSONtoDS(j):
    """Expand a {key: value} dict back into the device-supervisor label
    list format: [{"key": k, "value": v}, ...]."""
    return [{"key": k, "value": v} for k, v in j.items()]
|
||||
|
||||
def checkCredentialConfig():
    """Synchronise MQTT credentials between the supervisor config and the
    backup file /var/user/files/creds.json.

    Two directions:
      * config still holds placeholder ("unknown"/empty) credentials ->
        restore them from the stored creds file and reboot so they apply;
      * config is populated -> mirror any manual edits back into the
        stored creds file so they survive a config wipe.
    """
    logger.debug("CHECKING CONFIG")
    cfgpath = "/var/user/cfg/device_supervisor/device_supervisor.cfg"
    credspath = "/var/user/files/creds.json"
    with open(cfgpath, "r") as f:
        cfg = json.load(f)
    clouds = cfg.get("clouds")
    logger.debug(clouds)
    # alias the first cloud's connection args; mutations flow back into cfg
    args = clouds[0]["args"]
    unconfigured = (
        args["clientId"] == "unknown"
        or args["username"] == "unknown"
        or not args["passwd"]
        or args["passwd"] == "unknown"
    )
    if unconfigured:
        # if not configured then try to configure from stored values
        checkFileExist("creds.json")
        with open(credspath, "r") as c:
            creds = json.load(c)
            if creds:
                logger.debug("updating config with stored data")
                args["clientId"] = creds["clientId"]
                args["username"] = creds["userName"]
                args["passwd"] = creds["password"]
                cfg["clouds"] = clouds
                cfg = checkParameterConfig(cfg)
                with open(cfgpath, "w", encoding='utf-8') as n:
                    json.dump(cfg, n, indent=1, ensure_ascii=False)
                reboot()
    else:
        # config already filled out: persist any newly typed values, or
        # seed an empty creds file from the config
        checkFileExist("creds.json")
        with open(credspath, "r") as c:
            logger.debug("updating stored file with new data")
            cfg = checkParameterConfig(cfg)
            with open(cfgpath, "w", encoding='utf-8') as n:
                json.dump(cfg, n, indent=1, ensure_ascii=False)
            creds = json.load(c)
            if creds:
                if creds["clientId"] != args["clientId"]:
                    creds["clientId"] = args["clientId"]
                if creds["userName"] != args["username"]:
                    creds["userName"] = args["username"]
                if creds["password"] != args["passwd"]:
                    creds["password"] = args["passwd"]
            else:
                creds["clientId"] = args["clientId"]
                creds["userName"] = args["username"]
                creds["password"] = args["passwd"]
            with open(credspath, "w") as cw:
                json.dump(creds, cw)
|
||||
|
||||
def checkParameterConfig(cfg):
    """Reconcile the user parameters ("labels") in *cfg* with the copy
    stored at /var/user/files/params.json and return the updated cfg.

    A config value that differs from the stored one and is not "unknown"
    is treated as a manual edit and wins; keys missing from the stored set
    are copied in. The merged set is pushed back into the running global
    args as well. An empty/missing store is initialised from the in-memory
    parameters.
    """
    logger.debug("Checking Parameters!!!!")
    paramspath = "/var/user/files/params.json"
    cfgparams = convertDStoJSON(cfg.get("labels"))
    # make sure the storage file exists before reading it
    checkFileExist("params.json")
    with open(paramspath, "r") as f:
        logger.debug("Opened param storage file")
        try:
            params = json.load(f)
        except Exception as e:
            logger.error(f"Params files error: {e}")
            params = {}
    if params:
        if cfgparams != params:
            logger.debug("equalizing params between cfg and stored")
            for key, cfgval in cfgparams.items():
                if key not in params:
                    # key new to the stored set -> copy it over
                    params[key] = cfgval
                elif cfgval != params[key] and cfgval != "unknown":
                    # manual (non-placeholder) config edit wins
                    params[key] = cfgval
            cfg["labels"] = convertJSONtoDS(params)
            _set_global_args(convertJSONtoDS(params))
            with open(paramspath, "w") as p:
                json.dump(params, p)
    else:
        with open(paramspath, "w") as p:
            logger.debug("initializing param file with params in memory")
            json.dump(convertDStoJSON(get_params()), p)
        cfg["labels"] = get_params()

    return cfg
|
||||
|
||||
def getGPS():
    """Read the GPS fix and publish latitude/longitude (decimal degrees)
    and speed (mph) to the telemetry topic, with the timestamp rounded to
    the nearest 10 minutes."""
    gps = GPS()
    position_status = gps.get_position_status()
    logger.debug("position_status: ")
    logger.debug(position_status)
    # fields appear to be "<deg>X <min>X <N|S|E|W>" (trailing unit char
    # stripped with [:-1]) -- convert to signed decimal degrees
    lat_parts = position_status["latitude"].split(" ")
    lon_parts = position_status["longitude"].split(" ")
    lat_dec = int(lat_parts[0][:-1]) + float(lat_parts[1][:-1]) / 60
    lon_dec = int(lon_parts[0][:-1]) + float(lon_parts[1][:-1]) / 60
    if lat_parts[2] == "S":
        lat_dec = -lat_dec
    if lon_parts[2] == "W":
        lon_dec = -lon_dec
    logger.debug("HERE IS THE GPS COORDS")
    logger.debug(f"LATITUDE: {lat_dec}, LONGITUDE: {lon_dec}")
    # speed is reported in knots; 1 knot = 1.151 mph
    knots = position_status["speed"].split(" ")
    mph = float(knots[0]) * 1.151
    ts = (round(dt.timestamp(dt.now()) / 600) * 600) * 1000
    payload = {
        "ts": ts,
        "values": {
            "latitude": f"{lat_dec:.8f}",
            "longitude": f"{lon_dec:.8f}",
            "speed": f"{mph:.2f}",
        },
    }
    publish(__topic__, json.dumps(payload), __qos__)
|
||||
|
||||
def sendData(message, wizard_api):
    """Measure-event entry point: refresh the credential config, report
    the latestReportTime attribute, then publish a GPS telemetry sample.

    Each stage is wrapped so a failure in one never blocks the other.
    """
    logger.debug(message)
    try:
        checkCredentialConfig()
        report_ts = (round(dt.timestamp(dt.now()) / 600) * 600) * 1000
        publish("v1/devices/me/attributes",
                json.dumps({"latestReportTime": report_ts}), __qos__)
    except Exception as e:
        logger.error(f"Error in checkCredentialConfig: {e}")
    try:
        getGPS()
    except Exception as e:
        logger.error(f"Could not get gps data!: {e}")
|
||||
|
||||
|
||||
49
Pub_Sub/cameratrailer_mb/thingsboard/pub/sendSnapshot.py
Normal file
49
Pub_Sub/cameratrailer_mb/thingsboard/pub/sendSnapshot.py
Normal file
@@ -0,0 +1,49 @@
|
||||
import json, time, requests, base64
|
||||
from common.Logger import logger
|
||||
from quickfaas.remotebus import publish
|
||||
from quickfaas.global_dict import get as get_params
|
||||
from datetime import datetime as dt
|
||||
from requests.adapters import HTTPAdapter, Retry
|
||||
from requests.auth import HTTPDigestAuth
|
||||
from requests.exceptions import ConnectionError
|
||||
|
||||
|
||||
|
||||
def convertDStoJSON(ds):
    """Turn a [{"key": ..., "value": ...}, ...] parameter list into a
    {key: value} dict (later duplicates win)."""
    return {item["key"]: item["value"] for item in ds}
|
||||
|
||||
def getImage():
    """Fetch one JPEG frame from the camera and publish it base64-encoded
    to the telemetry topic (timestamp rounded to the nearest 10 minutes)."""
    params = convertDStoJSON(get_params())
    # the stored parameter uses "_" in place of "." for the IP address
    camera_ip = params["camera_ip"].replace("_", ".")
    port = params["port"]
    url = "http://" + camera_ip + ":" + port + "/cgi-bin/jpg/image.cgi"
    with open('./snapshot.jpg', 'wb') as handle:
        with requests.Session() as session:
            retry_policy = Retry(total=10, backoff_factor=0.1,
                                 status_forcelist=[404, 408, 500, 502, 503, 504])
            session.mount('http://', HTTPAdapter(max_retries=retry_policy))
            # NOTE(review): credentials are hard-coded -- consider moving
            # them into the device parameters
            resp = session.get(url, auth=HTTPDigestAuth("ASS", "amerus@1903"), stream=True)
            for block in resp.iter_content(1024):
                if not block:
                    break
                handle.write(block)

    with open('./snapshot.jpg', 'rb') as image_file:
        encoded_string = base64.b64encode(image_file.read())
        ts = (round(dt.timestamp(dt.now()) / 600) * 600) * 1000
        body = {"ts": ts,
                "values": {"snapshot": encoded_string.decode("UTF-8"),
                           "camera_error": "OK"}}
        publish(__topic__, json.dumps(body), __qos__)
|
||||
|
||||
|
||||
def sendSnapshot(message, wizard_api):
    """Snapshot trigger: grab a camera frame via getImage(); on failure
    publish a camera_error message so the platform surfaces the fault."""
    logger.debug(message)

    def report_error(text):
        # publish the error string in place of the snapshot
        stamp = (round(dt.timestamp(dt.now()) / 600) * 600) * 1000
        publish(__topic__,
                json.dumps({"ts": stamp, "values": {"camera_error": text}}),
                __qos__)

    try:
        getImage()
    except ConnectionError as ce:
        logger.error("Could not connect to Camera")
        logger.error(ce)
        report_error(f"Could not connect to camera (ConnectionError), check camera connection and power\n\n{ce}")
    except Exception as e:
        logger.error("Could not get image")
        logger.error(e)
        report_error(f"Could not connect to camera, check camera connection, power, IP Address\n\n{e}")
|
||||
|
||||
@@ -40,10 +40,18 @@ chemical_pump_01_rate_offset,ek_facility,default,periodic,,FLOAT,,,,,SPT_Chemica
|
||||
chemical_pump_01_run_status,ek_facility,default,periodic,,BIT,,,,0,FBK_Chemical_Pump_1_Running,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,
|
||||
chemical_pump_auto,ek_facility,default,periodic,,BIT,,,,0,CMD_Chemical_Pump_Auto,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,
|
||||
compressor_ambient_temp,ek_facility,default,periodic,,FLOAT,,,,,Val_Air_Compressor_Ambient_Temp,2,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,
|
||||
compressor_ambient_temp_hi_alm,ek_facility,default,periodic,,BIT,,,,0,N2_RoomTemp.AH,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,
|
||||
compressor_ambient_temp_hihi_alm,ek_facility,default,periodic,,BIT,,,,0,N2_RoomTemp.AHH,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,
|
||||
compressor_ambient_temp_lo_alm,ek_facility,default,periodic,,BIT,,,,0,N2_RoomTemp.AL,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,
|
||||
compressor_ambient_temp_lolo_alm,ek_facility,default,periodic,,BIT,,,,0,N2_RoomTemp.ALL,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,
|
||||
compressor_controller_temp,ek_facility,default,periodic,,FLOAT,,,,,Val_Air_Compressor_Controller_Temp,2,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,
|
||||
compressor_lifetime_run_hours,ek_facility,default,periodic,,FLOAT,,,,,Val_Air_Compressor_Run_Time,2,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,
|
||||
compressor_outlet_pressure,ek_facility,default,periodic,,FLOAT,,,,,Val_Air_Compressor_Outlet_PSI,2,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,
|
||||
compressor_outlet_temp,ek_facility,default,periodic,,FLOAT,,,,,Val_Air_Compressor_Outlet_Element_Temp,2,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,
|
||||
coriolis_density,ek_facility,default,periodic,,FLOAT,,,,,Val_Coriolis_Density,2,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,
|
||||
coriolis_flow_rate,ek_facility,default,periodic,,FLOAT,,,,,Val_Coriolis_FR,2,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,
|
||||
coriolis_lifetime,ek_facility,default,periodic,,FLOAT,,,,,Val_Coriolis_T1,2,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,
|
||||
coriolis_temp,ek_facility,default,periodic,,FLOAT,,,,,Val_Coriolis_Temperature,2,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,
|
||||
ct_200_current,ek_facility,default,periodic,,FLOAT,,,,,CT_200.Scaled_Out,2,,ro,,M-105 Motor Amperage,none,,,,,,,,,,,,,,,1,,,,,,,
|
||||
dpi_108a_pressure,ek_facility,default,periodic,,FLOAT,,,,,DPI_108A,2,,ro,,Differential PSI Across Sock Filter,none,,,,,,,,,,,,,,,1,,,,,,,
|
||||
dpi_108b_pressure,ek_facility,default,periodic,,FLOAT,,,,,DPI_108B,2,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,
|
||||
|
||||
|
@@ -1,5 +1,5 @@
|
||||
# Enter your python code.
|
||||
import json, os, time
|
||||
import json, os, time, shutil
|
||||
from datetime import datetime as dt
|
||||
from common.Logger import logger
|
||||
from quickfaas.remotebus import publish
|
||||
@@ -156,34 +156,110 @@ def chunk_payload(payload, chunk_size=20):
|
||||
for i in range(0, len(chunked_keys), chunk_size):
|
||||
yield {k: payload[k] for k in chunked_keys[i:i+chunk_size]}
|
||||
|
||||
def saveStoredAlarms(alarms):
    """Persist the alarm-timestamp dict to /var/user/files/storedAlarms.json.

    Best-effort: failures are logged, never raised, so the telemetry path
    keeps running even if the flash write fails.
    """
    try:
        with open('/var/user/files/storedAlarms.json', 'w') as f:
            json.dump(alarms, f, indent=4)
    # json has no JSONEncodeError; json.dump raises TypeError/ValueError
    # on unserialisable data (the original named a nonexistent attribute,
    # which would itself crash when any exception fired)
    except (IOError, OSError, TypeError, ValueError) as e:
        logger.error(f"Error saving alarms to /var/user/files/storedAlarms.json: {e}")
|
||||
|
||||
def getStoredAlarms():
    """Load the persisted alarm-first-seen timestamps from
    /var/user/files/storedAlarms.json.

    Missing file: initialise every tracked alarm as inactive (-1).
    Corrupt file: copy it aside with a timestamped name for post-mortem,
    then reinitialise. Returns a dict of alarm name -> epoch seconds when
    the alarm became active, or -1 when inactive.
    """
    alarms_path = '/var/user/files/storedAlarms.json'
    # -1 means "alarm not currently active" (the default dict was
    # duplicated verbatim in both except branches in the original)
    defaults = {
        "compressor_ambient_temp_hihi_alm": -1,
        "compressor_ambient_temp_hi_alm": -1,
        "compressor_ambient_temp_lo_alm": -1,
        "compressor_ambient_temp_lolo_alm": -1
    }
    storedAlarms = {}
    try:
        with open(alarms_path, 'r') as f:
            storedAlarms = json.load(f)
    except FileNotFoundError:
        storedAlarms = dict(defaults)
        saveStoredAlarms(storedAlarms)
    except json.JSONDecodeError:
        # keep the corrupt file around for debugging, then start fresh
        timestamp = dt.now().strftime("%Y%m%d_%H%M%S")
        file_name, file_extension = os.path.splitext(alarms_path)
        backup_file_path = f"{file_name}_{timestamp}{file_extension}"
        shutil.copyfile(alarms_path, backup_file_path)
        logger.error(f"Error decoding JSON. A backup of the file is created at {backup_file_path}. Initializing alarms.")
        storedAlarms = dict(defaults)
        saveStoredAlarms(storedAlarms)
    return storedAlarms
|
||||
|
||||
|
||||
def checkAlarms(name, value):
    """Track how long alarm *name* (0/1) has been active and decide
    whether to re-notify.

    Returns the alarm name when the alarm has been active for more than
    1770 s (~29.5 min), so the caller can publish a 0-pulse to re-trigger
    platform notifications; otherwise returns None. State transitions are
    persisted via saveStoredAlarms.
    """
    storedAlarms = getStoredAlarms()
    now = time.time()
    # default to -1 (inactive): the original `.get(name)` returned None
    # for an untracked alarm name and crashed on `now - None`
    started = storedAlarms.get(name, -1)
    if value == 1 and started == -1:
        # newly active: record when it started
        storedAlarms[name] = now
        saveStoredAlarms(storedAlarms)
        return None
    if value == 1 and now - started > 1770:
        # still active past the renotify window: restart the clock and
        # ask the caller to emit a reset pulse
        storedAlarms[name] = now
        saveStoredAlarms(storedAlarms)
        return name
    if value == 0 and started > 0:
        # alarm cleared: mark inactive
        storedAlarms[name] = -1
        saveStoredAlarms(storedAlarms)
        return None
    return None
|
||||
|
||||
def sendData(message):
    """Periodic-upload entry point for the facility driver.

    Builds a telemetry payload from healthy measures, routes "_spt" tags
    additionally to shared attributes, runs the ambient-temp alarms
    through checkAlarms (publishing 0-pulses first so long-running alarms
    re-notify), then publishes telemetry and attributes in chunks.

    Cleanup: the rendered diff carried both old and new versions of
    several statements (duplicate latestReportTime assignment, duplicate
    debug call, large commented-out legacy blocks); this keeps only the
    newer variants.
    """
    logger.debug(message)
    # timestamp rounded to the nearest minute (ms since epoch)
    now = (round(dt.timestamp(dt.now()) / 60) * 60) * 1000
    payload = {"ts": now, "values": {}}
    attributes_payload = {}
    resetAlarms = []
    # reset pulses are stamped 1 s earlier so they sort before the data
    alarmResetPayload = {"ts": now - 1000, "values": {}}
    ambient_alarms = ["compressor_ambient_temp_hihi_alm",
                      "compressor_ambient_temp_hi_alm",
                      "compressor_ambient_temp_lo_alm",
                      "compressor_ambient_temp_lolo_alm"]
    for measure in message["measures"]:
        try:
            logger.debug(measure)
            if measure["health"] == 1:
                # "_spt" tags are presumably setpoints -- mirrored to
                # shared attributes as well as telemetry (TODO confirm)
                if "_spt" in measure["name"]:
                    attributes_payload[measure["name"]] = measure["value"]
                if measure["name"] in ambient_alarms:
                    resetAlarms.append(checkAlarms(measure["name"], measure["value"]))
                payload["values"][measure["name"]] = measure["value"]
        except Exception as e:
            logger.error(e)

    logger.debug(payload)
    for alarm in resetAlarms:
        if alarm:
            alarmResetPayload["values"][alarm] = 0

    if alarmResetPayload["values"]:
        publish(__topic__, json.dumps(alarmResetPayload), __qos__)
        time.sleep(2)
    for chunk in chunk_payload(payload=payload, chunk_size=20):
        publish(__topic__, json.dumps(chunk), __qos__)
        time.sleep(2)

    # latest report time rounded to the second (ms since epoch)
    attributes_payload["latestReportTime"] = (round(dt.timestamp(dt.now()))) * 1000
    attributes_payload["connected"] = 1
    for chunk in chunk_payload(payload=attributes_payload):
        publish("v1/devices/me/attributes", json.dumps(chunk), __qos__)
        time.sleep(2)
|
||||
|
||||
190
Pub_Sub/ek_facility/thingsboard/sendDataContinue.py
Normal file
190
Pub_Sub/ek_facility/thingsboard/sendDataContinue.py
Normal file
@@ -0,0 +1,190 @@
|
||||
# Enter your python code.
|
||||
import json, os, time
|
||||
from datetime import datetime as dt
|
||||
from common.Logger import logger
|
||||
from quickfaas.remotebus import publish
|
||||
from quickfaas.global_dict import get as get_params
|
||||
from quickfaas.global_dict import _set_global_args
|
||||
from mobiuspi_lib.gps import GPS
|
||||
|
||||
def reboot():
    """Restart the device supervisor by SIGHUP-ing supervisord (used after
    rewriting the supervisor config so the new settings take effect)."""
    banner = "!" * 10
    logger.info(banner + "REBOOTING DEVICE" + banner)
    result = os.popen("kill -s SIGHUP `cat /var/run/python/supervisord.pid`").read()
    logger.info(f"REBOOT : {result}")
|
||||
|
||||
def checkFileExist(filename):
    """Ensure /var/user/files/<filename> exists, seeding a new file with
    an empty JSON object ({}).

    The original duplicated the file-creation code in both branches; this
    version creates the folder first and then handles the file once.
    """
    path = "/var/user/files"
    filepath = os.path.join(path, filename)
    if not os.path.exists(path):
        logger.info("no folder making files folder in var/user")
        os.makedirs(path)
    if not os.path.exists(filepath):
        logger.info("no creds file making creds file")
        with open(filepath, "a") as f:
            json.dump({}, f)
|
||||
|
||||
def convertDStoJSON(ds):
    """Collapse a [{"key": ..., "value": ...}, ...] label list into a
    plain {key: value} dict (later duplicates win)."""
    return {row["key"]: row["value"] for row in ds}
|
||||
|
||||
def convertJSONtoDS(j):
    """Expand a {key: value} dict into the device-supervisor label list
    format: [{"key": k, "value": v}, ...]."""
    return [{"key": name, "value": val} for name, val in j.items()]
|
||||
|
||||
def checkCredentialConfig():
    """Synchronise MQTT credentials between the supervisor config and the
    backup file /var/user/files/creds.json.

    Placeholder ("unknown"/empty) config credentials are restored from the
    stored file followed by a reboot; otherwise manual config edits are
    mirrored back into the stored file.
    """
    logger.info("CHECKING CONFIG")
    cfgpath = "/var/user/cfg/device_supervisor/device_supervisor.cfg"
    credspath = "/var/user/files/creds.json"
    with open(cfgpath, "r") as f:
        cfg = json.load(f)
    clouds = cfg.get("clouds")
    logger.info(clouds)
    # alias the first cloud's connection args; mutations flow back into cfg
    args = clouds[0]["args"]
    unconfigured = (
        args["clientId"] == "unknown"
        or args["username"] == "unknown"
        or not args["passwd"]
        or args["passwd"] == "unknown"
    )
    if unconfigured:
        # if not configured then try to configure from stored values
        checkFileExist("creds.json")
        with open(credspath, "r") as c:
            creds = json.load(c)
            if creds:
                logger.info("updating config with stored data")
                args["clientId"] = creds["clientId"]
                args["username"] = creds["userName"]
                args["passwd"] = creds["password"]
                cfg["clouds"] = clouds
                cfg = checkParameterConfig(cfg)
                with open(cfgpath, "w", encoding='utf-8') as n:
                    json.dump(cfg, n, indent=1, ensure_ascii=False)
                reboot()
    else:
        # config already filled out: persist any newly typed values, or
        # seed an empty creds file from the config
        checkFileExist("creds.json")
        with open(credspath, "r") as c:
            logger.info("updating stored file with new data")
            cfg = checkParameterConfig(cfg)
            with open(cfgpath, "w", encoding='utf-8') as n:
                json.dump(cfg, n, indent=1, ensure_ascii=False)
            creds = json.load(c)
            if creds:
                if creds["clientId"] != args["clientId"]:
                    creds["clientId"] = args["clientId"]
                if creds["userName"] != args["username"]:
                    creds["userName"] = args["username"]
                if creds["password"] != args["passwd"]:
                    creds["password"] = args["passwd"]
            else:
                creds["clientId"] = args["clientId"]
                creds["userName"] = args["username"]
                creds["password"] = args["passwd"]
            with open(credspath, "w") as cw:
                json.dump(creds, cw)
|
||||
|
||||
def checkParameterConfig(cfg):
    """Reconcile the user parameters ("labels") in *cfg* with the copy
    stored at /var/user/files/params.json and return the updated cfg.

    Manual (non-"unknown") config edits win over stored values; keys new
    to the config are copied into the stored set. The merged set is pushed
    back into the running global args. An empty/missing store is
    initialised from the in-memory parameters.

    Fix: json.load was unguarded here, so a corrupt params.json crashed
    the whole upload path; the cameratrailer driver's copy of this
    function already guards it -- made consistent.
    """
    logger.info("Checking Parameters!!!!")
    paramspath = "/var/user/files/params.json"
    cfgparams = convertDStoJSON(cfg.get("labels"))
    # make sure the storage file exists before reading it
    checkFileExist("params.json")
    with open(paramspath, "r") as f:
        logger.info("Opened param storage file")
        try:
            params = json.load(f)
        except Exception as e:
            logger.error(f"Params files error: {e}")
            params = {}
    if params:
        if cfgparams != params:
            logger.info("equalizing params between cfg and stored")
            for key, cfgval in cfgparams.items():
                if key not in params:
                    # key new to the stored set -> copy it over
                    params[key] = cfgval
                elif cfgval != params[key] and cfgval != "unknown":
                    # manual (non-placeholder) config edit wins
                    params[key] = cfgval
            cfg["labels"] = convertJSONtoDS(params)
            _set_global_args(convertJSONtoDS(params))
            with open(paramspath, "w") as p:
                json.dump(params, p)
    else:
        with open(paramspath, "w") as p:
            logger.info("initializing param file with params in memory")
            json.dump(convertDStoJSON(get_params()), p)
        cfg["labels"] = get_params()

    return cfg
|
||||
|
||||
def getGPS():
    """Read the GPS fix and return (latitude, longitude, speed_mph) as
    formatted strings (decimal degrees to 8 places, mph to 2 places)."""
    gps = GPS()
    position_status = gps.get_position_status()
    logger.debug("position_status: ")
    logger.debug(position_status)
    # fields appear to be "<deg>X <min>X <N|S|E|W>" (trailing unit char
    # stripped with [:-1]) -- convert to signed decimal degrees
    lat_parts = position_status["latitude"].split(" ")
    lon_parts = position_status["longitude"].split(" ")
    lat_dec = int(lat_parts[0][:-1]) + float(lat_parts[1][:-1]) / 60
    lon_dec = int(lon_parts[0][:-1]) + float(lon_parts[1][:-1]) / 60
    if lat_parts[2] == "S":
        lat_dec = -lat_dec
    if lon_parts[2] == "W":
        lon_dec = -lon_dec
    logger.info("HERE IS THE GPS COORDS")
    logger.info(f"LATITUDE: {lat_dec}, LONGITUDE: {lon_dec}")
    # speed is reported in knots; 1 knot = 1.151 mph
    knots = position_status["speed"].split(" ")
    mph = float(knots[0]) * 1.151
    return (f"{lat_dec:.8f}", f"{lon_dec:.8f}", f"{mph:.2f}")
|
||||
|
||||
def chunk_payload(payload, chunk_size=20):
    """Yield slices of *payload* with at most *chunk_size* keys each.

    Telemetry payloads ({"ts": ..., "values": {...}}) are split over the
    "values" dict with the timestamp repeated in every chunk; flat
    attribute dicts are split over their top-level keys.
    """
    if "values" in payload:
        # telemetry format: keep "ts", slice "values"
        pending = list(payload["values"].items())
        stamp = payload["ts"]
        while pending:
            head, pending = pending[:chunk_size], pending[chunk_size:]
            yield {"ts": stamp, "values": dict(head)}
    else:
        # flat attribute format: slice the top-level keys
        pending = list(payload.keys())
        while pending:
            head, pending = pending[:chunk_size], pending[chunk_size:]
            yield {k: payload[k] for k in head}
|
||||
|
||||
def sendData(message):
    """Periodic-upload entry point: sync credentials, build a telemetry
    payload from healthy measures, append a GPS reading, then publish
    telemetry and attributes in chunks with a 2 s gap between publishes."""
    try:
        checkCredentialConfig()
    except Exception as e:
        logger.error(e)
    # timestamp rounded to the nearest 10 minutes (ms since epoch)
    ts = (round(dt.timestamp(dt.now()) / 600) * 600) * 1000
    payload = {"ts": ts, "values": {}}
    attributes_payload = {}
    for measure in message["measures"]:
        try:
            logger.debug(measure)
            if measure["health"] == 1:
                # "_spt" tags are presumably setpoints -- mirrored to
                # shared attributes as well as telemetry (TODO confirm)
                if "_spt" in measure["name"]:
                    attributes_payload[measure["name"]] = measure["value"]
                payload["values"][measure["name"]] = measure["value"]
        except Exception as e:
            logger.error(e)

    try:
        payload["values"]["latitude"], payload["values"]["longitude"], payload["values"]["speed"] = getGPS()
    except:
        logger.error("Could not get GPS coordinates")

    for chunk in chunk_payload(payload=payload):
        publish(__topic__, json.dumps(chunk), __qos__)
        time.sleep(2)

    attributes_payload["latestReportTime"] = (round(dt.timestamp(dt.now()) / 600) * 600) * 1000
    for chunk in chunk_payload(payload=attributes_payload):
        publish("v1/devices/me/attributes", json.dumps(chunk), __qos__)
        time.sleep(2)
|
||||
|
||||
175156
Pub_Sub/ek_facility/thingsboard/tags.json
Normal file
175156
Pub_Sub/ek_facility/thingsboard/tags.json
Normal file
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,191 @@
|
||||
{
|
||||
"controllers": [
|
||||
{
|
||||
"protocol": "virtualcontroller",
|
||||
"name": "cameratrailer",
|
||||
"args": {},
|
||||
"samplePeriod": 10,
|
||||
"expired": 10000
|
||||
}
|
||||
],
|
||||
"groups": [
|
||||
{
|
||||
"name": "default",
|
||||
"uploadInterval": 600,
|
||||
"reference": 45
|
||||
}
|
||||
],
|
||||
"measures": [
|
||||
{
|
||||
"name": "t",
|
||||
"ctrlName": "cameratrailer",
|
||||
"group": "default",
|
||||
"uploadType": "periodic",
|
||||
"dataType": "BIT",
|
||||
"addr": "",
|
||||
"decimal": 2,
|
||||
"len": 1,
|
||||
"readWrite": "ro",
|
||||
"unit": "",
|
||||
"desc": "",
|
||||
"transformType": 0,
|
||||
"maxValue": "",
|
||||
"minValue": "",
|
||||
"maxScaleValue": "",
|
||||
"minScaleValue": "",
|
||||
"gain": "",
|
||||
"offset": ""
|
||||
}
|
||||
],
|
||||
"alarms": [],
|
||||
"misc": {
|
||||
"maxAlarmRecordSz": 2000,
|
||||
"logLvl": "DEBUG",
|
||||
"coms": [
|
||||
{
|
||||
"name": "rs232",
|
||||
"baud": 9600,
|
||||
"bits": 8,
|
||||
"stopbits": 1,
|
||||
"parityChk": "n"
|
||||
},
|
||||
{
|
||||
"name": "rs485",
|
||||
"baud": 19200,
|
||||
"bits": 8,
|
||||
"stopbits": 1,
|
||||
"parityChk": "n"
|
||||
}
|
||||
]
|
||||
},
|
||||
"clouds": [
|
||||
{
|
||||
"cacheSize": 10000,
|
||||
"enable": 1,
|
||||
"type": "Standard MQTT",
|
||||
"args": {
|
||||
"host": "thingsboard.cloud",
|
||||
"port": 1883,
|
||||
"clientId": "camera-trailer-",
|
||||
"auth": 1,
|
||||
"tls": 0,
|
||||
"cleanSession": 0,
|
||||
"mqttVersion": "v3.1.1",
|
||||
"keepalive": 60,
|
||||
"key": "",
|
||||
"cert": "",
|
||||
"rootCA": "",
|
||||
"verifyServer": 0,
|
||||
"verifyClient": 0,
|
||||
"username": "assmqtt",
|
||||
"passwd": "assmqtt@1903"
|
||||
}
|
||||
}
|
||||
],
|
||||
"labels": [
|
||||
{
|
||||
"key": "SN",
|
||||
"value": "GF5022137006251"
|
||||
},
|
||||
{
|
||||
"key": "MAC",
|
||||
"value": "00:18:05:1a:e5:36"
|
||||
},
|
||||
{
|
||||
"key": "MAC_UPPER",
|
||||
"value": "00:18:05:1A:E5:37"
|
||||
},
|
||||
{
|
||||
"key": "MAC_LOWER",
|
||||
"value": "00:18:05:1a:e5:37"
|
||||
}
|
||||
],
|
||||
"quickfaas": {
|
||||
"uploadFuncs": [
|
||||
{
|
||||
"qos": 1,
|
||||
"funcName": "sendData",
|
||||
"script": "# Enter your python code.\nimport json\nfrom common.Logger import logger\nfrom quickfaas.remotebus import publish\nfrom mobiuspi_lib.gps import GPS \n\ndef getGPS():\n # Create a gps instance\n gps = GPS()\n\n # Retrieve GPS information\n position_status = gps.get_position_status()\n logger.debug(\"position_status: \")\n logger.debug(position_status)\n latitude = position_status[\"latitude\"].split(\" \")\n longitude = position_status[\"longitude\"].split(\" \")\n lat_dec = int(latitude[0][:-1]) + (float(latitude[1][:-1])/60)\n lon_dec = int(longitude[0][:-1]) + (float(longitude[1][:-1])/60)\n if latitude[2] == \"S\":\n lat_dec = lat_dec * -1\n if longitude[2] == \"W\":\n lon_dec = lon_dec * -1\n #lat_dec = round(lat_dec, 7)\n #lon_dec = round(lon_dec, 7)\n logger.info(\"HERE IS THE GPS COORDS\")\n logger.info(f\"LATITUDE: {lat_dec}, LONGITUDE: {lon_dec}\")\n publish(__topic__, json.dumps({\"latitude\":f\"{lat_dec:.8f}\", \"longitude\":f\"{lon_dec:.8f}\"}), __qos__)\n\ndef sendData(message,wizard_api):\n logger.debug(message)\n #publish(__topic__, json.dumps(message), __qos__)\n getGPS()",
|
||||
"name": "sendData",
|
||||
"trigger": "measure_event",
|
||||
"topic": "v1/devices/me/telemetry",
|
||||
"msgType": 0,
|
||||
"groups": [
|
||||
"default"
|
||||
]
|
||||
},
|
||||
{
|
||||
"qos": 1,
|
||||
"funcName": "sendSnapshot",
|
||||
"script": "# Enter your python code.\nimport json\nfrom common.Logger import logger\nfrom quickfaas.remotebus import publish\nimport requests\nfrom requests.auth import HTTPDigestAuth\nfrom requests.exceptions import ConnectionError\nimport base64\n\ndef getImage():\n with open('./snapshot.jpg', 'wb') as handle:\n resp = requests.get(\"http://192.168.1.97:3097/cgi-bin/SnapshotJPEG?Resolution=640x360\", auth=HTTPDigestAuth(\"ASS\", \"amerus@1903\"), stream=True)\n for block in resp.iter_content(1024):\n if not block:\n break\n handle.write(block)\n \n with open('./snapshot.jpg', 'rb') as image_file:\n encoded_string = base64.b64encode(image_file.read())\n publish(__topic__, json.dumps({\"snapshot\": encoded_string.decode(\"UTF-8\"), \"camera_error\": \"OK\"}), __qos__)\n\n\ndef sendSnapshot(message,wizard_api):\n logger.debug(message)\n try:\n getImage()\n except ConnectionError as ce:\n logger.error(\"Could not connect to Camera\")\n logger.error(ce)\n publish(__topic__, json.dumps({\"camera_error\": f\"Could not connect to camera, check camera connection and power\\n\\n{ce}\"}), __qos__)\n except Exception as e:\n logger.error(\"Could not get image\")\n logger.error(e)\n publish(__topic__, json.dumps({\"camera_error\": f\"Could not connect to camera, check camera connection and power\\n\\n{e}\"}), __qos__)\n ",
|
||||
"name": "snapshot",
|
||||
"trigger": "measure_event",
|
||||
"topic": "v1/devices/me/telemetry",
|
||||
"cloudName": "default",
|
||||
"groups": [
|
||||
"snapshot"
|
||||
],
|
||||
"msgType": 0
|
||||
}
|
||||
],
|
||||
"downloadFuncs": []
|
||||
},
|
||||
"modbusSlave": {
|
||||
"enable": 0,
|
||||
"protocol": "Modbus-TCP",
|
||||
"port": 502,
|
||||
"slaveAddr": 1,
|
||||
"int16Ord": "ab",
|
||||
"int32Ord": "abcd",
|
||||
"float32Ord": "abcd",
|
||||
"maxConnection": 5,
|
||||
"mapping_table": []
|
||||
},
|
||||
"iec104Server": {
|
||||
"enable": 0,
|
||||
"cotSize": 2,
|
||||
"port": 2404,
|
||||
"serverList": [
|
||||
{
|
||||
"asduAddr": 1
|
||||
}
|
||||
],
|
||||
"kValue": 12,
|
||||
"wValue": 8,
|
||||
"t0": 15,
|
||||
"t1": 15,
|
||||
"t2": 10,
|
||||
"t3": 20,
|
||||
"maximumLink": 5,
|
||||
"timeSet": 1,
|
||||
"byteOrder": "abcd",
|
||||
"mapping_table": []
|
||||
},
|
||||
"opcuaServer": {
|
||||
"enable": 0,
|
||||
"port": 4840,
|
||||
"maximumLink": 5,
|
||||
"securityMode": 0,
|
||||
"identifierType": "String",
|
||||
"mapping_table": []
|
||||
},
|
||||
"bindConfig": {
|
||||
"enable": 0,
|
||||
"bind": {
|
||||
"modelId": "",
|
||||
"modelName": "",
|
||||
"srcId": "",
|
||||
"srcName": "",
|
||||
"devId": "",
|
||||
"devName": ""
|
||||
},
|
||||
"varGroups": [],
|
||||
"variables": [],
|
||||
"alerts": []
|
||||
},
|
||||
"southMetadata": {},
|
||||
"bindMetadata": {
|
||||
"version": "",
|
||||
"timestamp": ""
|
||||
}
|
||||
}
|
||||
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@@ -0,0 +1,161 @@
|
||||
import json, os
|
||||
from datetime import datetime as dt
|
||||
from common.Logger import logger
|
||||
from quickfaas.remotebus import publish
|
||||
from mobiuspi_lib.gps import GPS
|
||||
from quickfaas.global_dict import get as get_params
|
||||
from quickfaas.global_dict import _set_global_args
|
||||
|
||||
|
||||
def reboot():
    """Restart the device supervisor by SIGHUP-ing supervisord so a newly
    written supervisor config takes effect."""
    banner = "!" * 10
    logger.info(banner + "REBOOTING DEVICE" + banner)
    result = os.popen("kill -s SIGHUP `cat /var/run/python/supervisord.pid`").read()
    logger.info(f"REBOOT : {result}")
|
||||
|
||||
def checkFileExist(filename, path="/var/user/files"):
    """Ensure *path* exists and contains *filename*; create an empty JSON file if missing.

    Args:
        filename: Name of the JSON file to ensure inside *path*.
        path: Base directory to hold the file (defaults to the device's
            persistent file store, keeping the original call sites unchanged).

    The original version duplicated the file-creation code in both the
    "folder missing" and "file missing" branches; this folds it into one path.
    Existing files are never touched.
    """
    filepath = os.path.join(path, filename)
    if not os.path.exists(path):
        logger.debug("no folder making files folder in var/user")
        os.makedirs(path)
    if not os.path.exists(filepath):
        logger.debug("no creds file making creds file")
        # "a" keeps the original open mode; the file is known to be absent here,
        # so this simply creates it with an empty JSON object.
        with open(filepath, "a") as f:
            json.dump({}, f)
|
||||
|
||||
def convertDStoJSON(ds):
    """Flatten a list of {"key": ..., "value": ...} records into a plain dict.

    Later records win on duplicate keys, matching dict-assignment semantics.
    """
    return {record["key"]: record["value"] for record in ds}
|
||||
|
||||
def convertJSONtoDS(j):
    """Expand a plain dict into a list of {"key": ..., "value": ...} records.

    Preserves the dict's insertion order, like the original key loop.
    """
    return [{"key": k, "value": v} for k, v in j.items()]
|
||||
|
||||
def checkCredentialConfig():
    """Sync MQTT credentials between the supervisor config and the persisted creds file.

    If the first cloud entry in the supervisor config has placeholder
    ("unknown") or empty credentials, restore them from
    /var/user/files/creds.json, rewrite the config, and reboot so the
    supervisor reloads it.  Otherwise the config is treated as authoritative
    and its credentials are written back to the creds file.
    """
    logger.debug("CHECKING CONFIG")
    cfgpath = "/var/user/cfg/device_supervisor/device_supervisor.cfg"
    credspath = "/var/user/files/creds.json"
    cfg = dict()
    with open(cfgpath, "r") as f:
        cfg = json.load(f)
    # NOTE(review): assumes cfg["clouds"] exists and is non-empty -- a missing
    # key would make clouds None and crash on the subscript below; confirm.
    clouds = cfg.get("clouds")
    logger.debug(clouds)
    #if not configured then try to configure from stored values
    if clouds[0]["args"]["clientId"] == "unknown" or clouds[0]["args"]["username"] == "unknown" or not clouds[0]["args"]["passwd"] or clouds[0]["args"]["passwd"] == "unknown":
        checkFileExist("creds.json")
        with open(credspath, "r") as c:
            creds = json.load(c)
        if creds:
            logger.debug("updating config with stored data")
            # Stored file uses "userName"/"password"; config uses "username"/"passwd".
            clouds[0]["args"]["clientId"] = creds["clientId"]
            clouds[0]["args"]["username"] = creds["userName"]
            clouds[0]["args"]["passwd"] = creds["password"]
            cfg["clouds"] = clouds
            cfg = checkParameterConfig(cfg)
            with open(cfgpath, "w", encoding='utf-8') as n:
                json.dump(cfg, n, indent=1, ensure_ascii=False)
            # Restart so the supervisor picks up the restored credentials.
            reboot()
    else:
        #assuming clouds is filled out, if data is different then assume someone typed in something new and store it, if creds is empty fill with clouds' data
        checkFileExist("creds.json")
        with open(credspath, "r") as c:
            logger.debug("updating stored file with new data")
            cfg = checkParameterConfig(cfg)
            with open(cfgpath, "w", encoding='utf-8') as n:
                json.dump(cfg, n, indent=1, ensure_ascii=False)
            creds = json.load(c)
            if creds:
                if creds["clientId"] != clouds[0]["args"]["clientId"]:
                    creds["clientId"] = clouds[0]["args"]["clientId"]
                if creds["userName"] != clouds[0]["args"]["username"]:
                    creds["userName"] = clouds[0]["args"]["username"]
                if creds["password"] != clouds[0]["args"]["passwd"]:
                    creds["password"] = clouds[0]["args"]["passwd"]
            else:
                creds["clientId"] = clouds[0]["args"]["clientId"]
                creds["userName"] = clouds[0]["args"]["username"]
                creds["password"] = clouds[0]["args"]["passwd"]
            # NOTE(review): creds file is reopened for writing while the read
            # handle `c` is still open; works but is fragile -- consider
            # closing the read handle first.
            with open(credspath, "w") as cw:
                json.dump(creds,cw)
|
||||
|
||||
def checkParameterConfig(cfg):
    """Reconcile the config's "labels" parameters with the persisted params file.

    Manual (non-"unknown") edits in *cfg* win over stored values; keys present
    in *cfg* but missing from the store are copied in.  If the store is empty
    or unreadable it is re-initialised from the in-memory globals.

    Args:
        cfg: Parsed device-supervisor config dict (must contain "labels").

    Returns:
        The (possibly updated) *cfg*, so the caller can persist it.
    """
    logger.debug("Checking Parameters!!!!")
    paramspath = "/var/user/files/params.json"
    cfgparams = convertDStoJSON(cfg.get("labels"))
    #check stored values
    checkFileExist("params.json")
    with open(paramspath, "r") as f:
        logger.debug("Opened param storage file")
        try:
            params = json.load(f)
        except Exception as e:
            # Corrupt/empty store: log and fall through to re-initialisation.
            logger.error(f"Params files error: {e}")
            params = {}
    if params:
        if cfgparams != params:
            #go through each param
            #if not "unknown" and cfg and params aren't the same take from cfg likely updated manually
            #if key in cfg but not in params copy to params
            logger.debug("equalizing params between cfg and stored")
            for key in cfgparams.keys():
                try:
                    if cfgparams[key] != params[key] and cfgparams[key] != "unknown":
                        params[key] = cfgparams[key]
                except KeyError:
                    # Key present in cfg but missing from the store: copy it.
                    # (Was a bare `except:`; narrowed so unrelated errors surface.)
                    params[key] = cfgparams[key]
            cfg["labels"] = convertJSONtoDS(params)
            _set_global_args(convertJSONtoDS(params))
            with open(paramspath, "w") as p:
                json.dump(params, p)
    else:
        # No usable stored params: seed the file from the in-memory globals.
        with open(paramspath, "w") as p:
            logger.debug("initializing param file with params in memory")
            json.dump(convertDStoJSON(get_params()), p)
        cfg["labels"] = get_params()

    return cfg
|
||||
|
||||
def getGPS():
    """Read the device GPS fix and publish latitude/longitude/speed telemetry.

    Assumes the position strings split on spaces into three tokens:
    degrees (with a trailing unit symbol), decimal minutes (with a trailing
    unit symbol), and a hemisphere letter -- TODO confirm against
    mobiuspi_lib.gps output format.
    """
    # Create a gps instance
    gps = GPS()

    # Retrieve GPS information
    position_status = gps.get_position_status()
    logger.debug("position_status: ")
    logger.debug(position_status)
    latitude = position_status["latitude"].split(" ")
    longitude = position_status["longitude"].split(" ")
    # Degrees + decimal-minutes -> decimal degrees; [:-1] strips the trailing
    # unit symbol from each token.
    lat_dec = int(latitude[0][:-1]) + (float(latitude[1][:-1])/60)
    lon_dec = int(longitude[0][:-1]) + (float(longitude[1][:-1])/60)
    # Southern and western hemispheres are negative decimal degrees.
    if latitude[2] == "S":
        lat_dec = lat_dec * -1
    if longitude[2] == "W":
        lon_dec = lon_dec * -1
    #lat_dec = round(lat_dec, 7)
    #lon_dec = round(lon_dec, 7)
    logger.debug("HERE IS THE GPS COORDS")
    logger.debug(f"LATITUDE: {lat_dec}, LONGITUDE: {lon_dec}")
    # Speed is reported in knots; 1 knot ~= 1.151 mph.
    speedKnots = position_status["speed"].split(" ")
    speedMPH = float(speedKnots[0]) * 1.151
    # Timestamp is snapped to the nearest 600 s (10 min), in milliseconds.
    publish(__topic__, json.dumps({"ts": (round(dt.timestamp(dt.now())/600)*600)*1000, "values":{"latitude":f"{lat_dec:.8f}", "longitude":f"{lon_dec:.8f}", "speed": f"{speedMPH:.2f}"}}), __qos__)
|
||||
|
||||
def sendData(message, wizard_api):
    """Trigger entry point: sync credentials, report the timestamp, then publish GPS data.

    Each step is isolated in its own try/except so one failure does not stop
    the other.
    """
    logger.debug(message)
    #publish(__topic__, json.dumps(message), __qos__)
    try:
        checkCredentialConfig()
        # Timestamp snapped to the nearest 600 s (10 min), in milliseconds.
        report_ts = (round(dt.timestamp(dt.now()) / 600) * 600) * 1000
        attrs_payload = json.dumps({"latestReportTime": report_ts})
        publish("v1/devices/me/attributes", attrs_payload, __qos__)
    except Exception as e:
        logger.error(f"Error in checkCredentialConfig: {e}")
    try:
        getGPS()
    except Exception as e:
        logger.error(f"Could not get gps data!: {e}")
|
||||
|
||||
|
||||
@@ -0,0 +1,54 @@
|
||||
import json, time, requests, base64
|
||||
from common.Logger import logger
|
||||
from quickfaas.remotebus import publish
|
||||
from quickfaas.global_dict import get as get_params
|
||||
from datetime import datetime as dt
|
||||
from requests.adapters import HTTPAdapter, Retry
|
||||
from requests.auth import HTTPDigestAuth
|
||||
from requests.exceptions import ConnectionError
|
||||
|
||||
|
||||
|
||||
def convertDStoJSON(ds):
    """Collapse datastream records ({"key": ..., "value": ...} dicts) into one mapping."""
    pairs = ((item["key"], item["value"]) for item in ds)
    return dict(pairs)
|
||||
|
||||
def getImage():
    """Fetch a JPEG snapshot from the camera and publish it (base64) to ThingsBoard.

    Reads the camera address from the global params, retries transient HTTP
    failures, writes the image to ./snapshot.jpg, then publishes the encoded
    image with camera_error "OK".
    """
    params = convertDStoJSON(get_params())
    # IP is stored with underscores in the params store; restore the dots.
    camera_ip = params["camera_ip"].replace("_", ".")
    port = params["port"]
    base_url = "http://" + camera_ip + ":" + port
    # SECURITY NOTE(review): hard-coded camera credentials; these should be
    # moved into the device params/secret store.
    auth = HTTPDigestAuth("ASS", "amerus@1903")
    with open('./snapshot.jpg', 'wb') as handle:
        with requests.Session() as s:
            retries = Retry(total=10, backoff_factor=0.1, status_forcelist=[404, 408, 500, 502, 503, 504])
            s.mount('http://', HTTPAdapter(max_retries=retries))
            # Best-effort autofocus before the snapshot; failure is non-fatal.
            # (Was a bare `except:`; narrowed so e.g. KeyboardInterrupt escapes.)
            try:
                resp = s.get(base_url + "/cgi-bin/camctrl?af=on", auth=auth, stream=True)
            except Exception:
                logger.error("Did not Auto Focus")
            # Give the camera time to finish focusing before the capture.
            time.sleep(2)
            resp = s.get(base_url + "/cgi-bin/SnapshotJPEG?Resolution=640x360", auth=auth, stream=True)
            for block in resp.iter_content(1024):
                if not block:
                    break
                handle.write(block)

    with open('./snapshot.jpg', 'rb') as image_file:
        encoded_string = base64.b64encode(image_file.read())
    # Timestamp snapped to the nearest 600 s (10 min), in milliseconds.
    publish(__topic__, json.dumps({"ts": (round(dt.timestamp(dt.now())/600)*600)*1000, "values":{"snapshot": encoded_string.decode("UTF-8"), "camera_error": "OK"}}), __qos__)
|
||||
|
||||
|
||||
def sendSnapshot(message,wizard_api):
    """Trigger handler: capture a camera snapshot and publish it, reporting failures as telemetry."""
    logger.debug(message)
    try:
        getImage()
    except ConnectionError as ce:
        # Network-level failure reaching the camera: report a targeted message.
        logger.error("Could not connect to Camera")
        logger.error(ce)
        publish(__topic__, json.dumps({"ts": (round(dt.timestamp(dt.now())/600)*600)*1000, "values":{"camera_error": f"Could not connect to camera (ConnectionError), check camera connection and power\n\n{ce}"}}), __qos__)
    except Exception as e:
        # Anything else (bad params, HTTP error, file I/O) still surfaces to the platform.
        logger.error("Could not get image")
        logger.error(e)
        publish(__topic__, json.dumps({"ts": (round(dt.timestamp(dt.now())/600)*600)*1000, "values":{"camera_error": f"Could not connect to camera, check camera connection, power, IP Address\n\n{e}"}}), __qos__)
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 1,
|
||||
"execution_count": 2,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
@@ -12,7 +12,7 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 2,
|
||||
"execution_count": 3,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
@@ -389,7 +389,7 @@
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "tbDataCollector",
|
||||
"display_name": "pycomm",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
@@ -403,7 +403,7 @@
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.10.5"
|
||||
"version": "3.13.2"
|
||||
},
|
||||
"orig_nbformat": 4
|
||||
},
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
47
code snippets/test_plctags.py
Normal file
47
code snippets/test_plctags.py
Normal file
@@ -0,0 +1,47 @@
|
||||
import unittest
|
||||
from unittest.mock import MagicMock
|
||||
from getPLCData import LogixDriver # assume this is the name of the file containing the LogixDriver class
|
||||
|
||||
class TestGetPLCTags(unittest.TestCase):
    """Tests for LogixDriver tag retrieval.

    NOTE(review): every test builds a ``MagicMock(spec=LogixDriver)`` and then
    immediately rebinds ``plc`` via ``with LogixDriver(ip_address)``, so the
    mock is never used and the tests attempt a real PLC connection to
    192.168.1.100.  They likely need ``unittest.mock.patch`` on LogixDriver
    instead -- confirm intent before relying on these.
    """

    def test_get_plc_tags(self):
        # Mock out the LogixDriver connection to a PLC
        ip_address = "192.168.1.100"
        plc = MagicMock(spec=LogixDriver)

        # Simulate getting the tag list from the PLC
        plc.get_tag_list.return_value = ["tag1", "tag2", "tag3"]

        # NOTE(review): this uses the real driver and shadows the mock above.
        with LogixDriver(ip_address) as plc:
            tags_json = plc.tags_json

        self.assertEqual(tags_json, {"tags": ["tag1", "tag2", "tag3"]})

    def test_get_plc_tags_fail(self):
        # Mock out the LogixDriver connection to a PLC
        ip_address = "192.168.1.100"
        plc = MagicMock(spec=LogixDriver)

        # Simulate getting an error when trying to connect to the PLC
        # NOTE(review): passes only because the real connection attempt raises.
        with self.assertRaises(Exception):
            with LogixDriver(ip_address) as plc:
                plctags = plc.get_tag_list()

    def test_write_tags_to_file(self):
        # Mock out the file I/O operations
        with unittest.mock.patch('builtins.open', create=True, spec='open'):
            with unittest.mock.patch('json.dump') as mock_dump:

                ip_address = "192.168.1.100"
                plc = MagicMock(spec=LogixDriver)

                # Simulate getting the tag list from the PLC
                plc.get_tag_list.return_value = ["tag1", "tag2", "tag3"]

                # NOTE(review): again uses the real driver, not the mock.
                with LogixDriver(ip_address) as plc:
                    plctags = plc.tags_json

                mock_dump.assert_called_once_with(plctags, unittest.mock.ANY, indent=4)
|
||||
|
||||
# Run the test suite when this module is executed directly.
if __name__ == '__main__':
    unittest.main()
|
||||
Reference in New Issue
Block a user