added several drivers
BIN  Pub_Sub/.DS_Store (vendored)
Binary file not shown.
@@ -17,29 +17,51 @@
   "outputs": [],
   "source": [
    "root = \"/Users/nico/Documents/GitHub/HP_InHand_IG502/Pub_Sub\"\n",
    "devicetype = \"advvfdipp\"\n",
    "devicetype = \"plcfreshwater_advvfdipp\"\n",
    "platform = \"thingsboard\" #\"mistaway\"\n",
    "platform_short = \"tb\" if platform == \"thingsboard\" else \"ma\" \n",
    "startversion = 2\n",
    "startversion = 1\n",
    "deviceconfig = devicetype + \"_\" + platform_short + \"_\" +\"v\" + str(startversion) + \".cfg\"\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "execution_count": 4,
   "metadata": {},
   "outputs": [],
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "checking path exists: /Users/nico/Documents/GitHub/HP_InHand_IG502/Pub_Sub/plcfreshwater_advvfdipp/thingsboard/v1/plcfreshwater_advvfdipp_tb_v1.cfg\n",
      "/Users/nico/Documents/GitHub/HP_InHand_IG502/Pub_Sub/plcfreshwater_advvfdipp/thingsboard/v1\n",
      "Write Code Path: /Users/nico/Documents/GitHub/HP_InHand_IG502/Pub_Sub/plcfreshwater_advvfdipp/thingsboard/v1/pub/sendData.py\n",
      "/Users/nico/Documents/GitHub/HP_InHand_IG502/Pub_Sub/plcfreshwater_advvfdipp/thingsboard/v1/pub\n",
      "Path didn't exist creating path: /Users/nico/Documents/GitHub/HP_InHand_IG502/Pub_Sub/plcfreshwater_advvfdipp/thingsboard/v1/pub\n"
     ]
    }
   ],
   "source": [
    "\n",
    "convert_config.write_code(root + devicetype + \"/\" + platform + \"/v\" + str(startversion) + \"/pub\" + \"/sendData.py\", convert_config.get_config_pub(root + \"/\" + devicetype + \"/\" + platform + \"/v\" + str(startversion) + \"/\" + deviceconfig))\n",
    "convert_config.write_code(root + \"/\" + devicetype + \"/\" + platform + \"/v\" + str(startversion) + \"/pub\" + \"/sendData.py\", convert_config.get_config_pub(root + \"/\" + devicetype + \"/\" + platform + \"/v\" + str(startversion) + \"/\" + deviceconfig))\n",
    "#convert_config.write_code(root + devicetype + \"/\" + platform + \"/v\" + str(startversion) + \"/sub\" + \"/receiveCommand.py\", convert_config.get_config_sub(root + \"/\" + devicetype + \"/\" + platform + \"/v\" + str(startversion) + \"/\" + deviceconfig))\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "execution_count": 6,
   "metadata": {},
   "outputs": [],
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "/Users/nico/Documents/GitHub/HP_InHand_IG502/Pub_Sub/plcfreshwater_advvfdipp/thingsboard/v1\n",
      "/Users/nico/Documents/GitHub/HP_InHand_IG502/Pub_Sub/plcfreshwater_advvfdipp/thingsboard/v2\n",
      "/Users/nico/Documents/GitHub/HP_InHand_IG502/Pub_Sub/plcfreshwater_advvfdipp/thingsboard/v2/pub\n"
     ]
    }
   ],
   "source": [
    "\n",
    "convert_config.write_config(root + \"/\" + devicetype + \"/\" + platform + \"/v\" + str(startversion + 1) + \"/\" + devicetype + \"_\" + platform_short + \"_v\" + str(startversion + 1) + \".cfg\", \n",
@@ -65,7 +87,7 @@
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.10.5 | packaged by conda-forge | (main, Jun 14 2022, 07:05:37) [Clang 13.0.1 ]"
   "version": "3.10.5"
  },
  "orig_nbformat": 4,
  "vscode": {
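Note: the notebook cell above derives the config filename as `<devicetype>_<platform_short>_v<startversion>.cfg` under `<root>/<devicetype>/<platform>/v<version>/`. A minimal sketch of the same naming scheme using pathlib (illustrative only, values copied from the cell, not part of this commit):

```python
from pathlib import Path

# Values mirror the notebook cell above.
root = Path("/Users/nico/Documents/GitHub/HP_InHand_IG502/Pub_Sub")
devicetype = "plcfreshwater_advvfdipp"
platform = "thingsboard"
platform_short = "tb" if platform == "thingsboard" else "ma"
startversion = 1

deviceconfig = f"{devicetype}_{platform_short}_v{startversion}.cfg"
print(root / devicetype / platform / f"v{startversion}" / deviceconfig)
# .../Pub_Sub/plcfreshwater_advvfdipp/thingsboard/v1/plcfreshwater_advvfdipp_tb_v1.cfg
```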
@@ -9,6 +9,7 @@ def get_config(path):
    return json.load(f)

def get_config_pub(path):
    print("checking path exists: " + path)
    checkFileExist(path)
    with open(path, "r") as f:
        codeString = json.load(f)
@@ -29,6 +30,8 @@ def code_to_string(path):
    return False

def write_code(path, codestr):
    print("Write Code Path: " + path)
    #print(codestr)
    checkFileExist(path)
    with open(path, "w") as f:
        f.write(codestr)
@@ -53,12 +56,16 @@ def write_config(path, config, pubDir, subDir):
    json.dump(config, f, indent=4)

def checkFileExist(path):
    print("/".join(path.split("/")[:-1]))
    if not os.path.exists("/".join(path.split("/")[:-1])):
        print("Path didn't exist creating path: " + "/".join(path.split("/")[:-1]))
        os.makedirs("/".join(path.split("/")[:-1]))
        open(path, "a").close()
    if not os.path.exists(path):
        print("Path did not exist creating path: " + path)
        open(path, "a").close()

def checkFolderExist(path):
    if not os.path.exists(path):
        print("Making folder: " + path)
        os.makedirs(path)
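Note: checkFileExist recomputes the parent directory several times via "/".join(path.split("/")[:-1]). A sketch of the same intent with os.makedirs(exist_ok=True), assuming the goal is only to guarantee the file and its parent directory exist (hypothetical alternative, not part of this commit):

```python
import os

def check_file_exist(path):
    # Ensure the parent directory exists; no-op if it is already there.
    os.makedirs(os.path.dirname(path), exist_ok=True)
    if not os.path.exists(path):
        print("Path did not exist, creating: " + path)
        open(path, "a").close()  # create an empty file
```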
@@ -0,0 +1,21 @@
MeasuringPointName,ControllerName,GroupName,UploadType,DataType,Address,Decimal,Len,ReadWrite,Unit,Description,Transform Type,MaxValue,MinValue,MaxScale,MinScale,Gain,Offset,startBit,endBit
pump_1_daily_total,Pond_A,dual_flowmeter,periodic,FLOAT,Pump_1_Daily_Flow_Rate_Total,2,1,ro,,,none,,,,,,,,
pump_1_run_status,Pond_A,dual_flowmeter,periodic,BIT,Pump_1_Run_Status,2,1,ro,,,none,,,,,,,,
pump_1_flowrate,Pond_A,dual_flowmeter,periodic,FLOAT,Pump_1_SCL_Flow_Meter,2,1,ro,,,none,,,,,,,,
pump_1_yesterdays_total,Pond_A,dual_flowmeter,periodic,FLOAT,Pump_1_Yesterdays_Total,2,1,ro,,,none,,,,,,,,
pump_1_prevmonth_total,Pond_A,dual_flowmeter,periodic,FLOAT,Pump_1_PrevMonth_Total,2,1,ro,,,none,,,,,,,,
pump_1_month_total,Pond_A,dual_flowmeter,periodic,FLOAT,Pump_1_Current_Month_Total,2,1,ro,,,none,,,,,,,,
pump_1_lifetime_total,Pond_A,dual_flowmeter,periodic,FLOAT,Pump_1_Lifetime_Flow,2,1,ro,,,none,,,,,,,,
pump_1_suction,Pond_A,dual_flowmeter,periodic,FLOAT,Suction_PSI_TP1_Scaled,2,1,ro,,,none,,,,,,,,
pump_2_daily_total,Pond_A,dual_flowmeter,periodic,FLOAT,Pump_2_Daily_Flow_Rate_Total,2,1,ro,,,none,,,,,,,,
pump_2_run_status,Pond_A,dual_flowmeter,periodic,BIT,Pump_2_Run_Status,2,1,ro,,,none,,,,,,,,
pump_2_flowrate,Pond_A,dual_flowmeter,periodic,FLOAT,Pump_2_SCL_Flow_Meter,2,1,ro,,,none,,,,,,,,
pump_2_yesterdays_total,Pond_A,dual_flowmeter,periodic,FLOAT,Pump_2_Yesterdays_Total,2,1,ro,,,none,,,,,,,,
pump_2_prevmonth_total,Pond_A,dual_flowmeter,periodic,FLOAT,Pump_2_PrevMonth_Total,2,1,ro,,,none,,,,,,,,
pump_2_month_total,Pond_A,dual_flowmeter,periodic,FLOAT,Pump_2_Current_Month_Total,2,1,ro,,,none,,,,,,,,
pump_2_lifetime_total,Pond_A,dual_flowmeter,periodic,FLOAT,Pump_2_Lifetime_Flow,2,1,ro,,,none,,,,,,,,
pump_2_suction,Pond_A,dual_flowmeter,periodic,FLOAT,Suction_PSI_TP2_Scaled,2,1,ro,,,none,,,,,,,,
pump_charge_psi_tp1,Pond_A,dual_flowmeter,periodic,FLOAT,Charge_PSI_TP1_Scaled,2,1,ro,,,none,,,,,,,,
pond_1_height,Pond_A,dual_flowmeter,periodic,FLOAT,Pond_level_TP1_Scaled,2,1,ro,,,none,,,,,,,,
pond_1_volume,Pond_A,dual_flowmeter,periodic,FLOAT,pond1Volume,2,1,ro,,,none,,,,,,,,
charge_pump_run_status,Pond_A,dual_flowmeter,periodic,BIT,CHARGE_PUMP_Run_Status,2,1,ro,,,none,,,,,,,,
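Note: the rows above follow the gateway's measuring-point schema (point name, controller, group, upload type, data type, PLC address, and so on). A minimal sketch of loading them with csv.DictReader; the filename is an assumption, since this hunk's file header is suppressed:

```python
import csv

with open("dual_flowmeter.csv", newline="") as f:  # assumed filename
    for row in csv.DictReader(f):
        # e.g. row["MeasuringPointName"] == "pump_1_daily_total",
        # row["DataType"] == "FLOAT", row["ReadWrite"] == "ro"
        print(row["MeasuringPointName"], row["Address"], row["DataType"])
```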
@@ -7,116 +7,183 @@ from quickfaas.global_dict import get as get_params
from quickfaas.global_dict import _set_global_args
from pycomm3 import CIPDriver


def reboot():
def reboot(reason="Rebooting for config file update"):
    #basic = Basic()
    logger.info("!" * 10 + "REBOOTING DEVICE" + "!"*10)
    logger.info(reason)
    r = os.popen("kill -s SIGHUP `cat /var/run/python/supervisord.pid`").read()
    logger.info(f"REBOOT : {r}")

def checkFileExist(filename):
    path = "/var/user/files"
    if not os.path.exists(path):
        logger.info("no folder making files folder in var/user")
        os.makedirs(path)
        with open(path + "/" + filename, "a") as f:
            json.dump({}, f)
    if not os.path.exists(path + "/" + filename):
        logger.info("no creds file making creds file")
        with open(path + "/" + filename, "a") as f:
            json.dump({}, f)
    try:
        if not os.path.exists(path):
            logger.debug("no folder making files folder in var/user")
            os.makedirs(path)
            with open(path + "/" + filename, "a") as f:
                json.dump({}, f)
    except Exception as e:
        logger.error(f"Something went wrong in checkFileExist while making folder: {e}")

    try:
        if not os.path.exists(path + "/" + filename):
            logger.debug("no creds file making creds file")
            with open(path + "/" + filename, "a") as f:
                json.dump({}, f)
    except Exception as e:
        logger.error(f"Something went wrong in checkFileExist while making file: {e}")
def convertDStoJSON(ds):
    j = dict()
    for x in ds:
        j[x["key"]] = x["value"]
    try:
        for x in ds:
            j[x["key"]] = x["value"]
    except Exception as e:
        logger.error(f"Something went wrong in convertDStoJSON: {e}")
    return j

def convertJSONtoDS(j):
    d = []
    for key in j.keys():
        d.append({"key": key, "value": j[key]})
    try:
        for key in j.keys():
            d.append({"key": key, "value": j[key]})
    except Exception as e:
        logger.error(f"Something went wrong in convertJSONtoDS: {e}")
    return d
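Note: the two converters above are inverses over well-formed label lists; a quick round-trip check with illustrative values:

```python
labels = [{"key": "site", "value": "Pond_A"}, {"key": "interval", "value": 600}]
as_dict = convertDStoJSON(labels)   # {"site": "Pond_A", "interval": 600}
assert convertJSONtoDS(as_dict) == labels
```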
def checkCredentialConfig():
    logger.info("CHECKING CONFIG")
    logger.debug("CHECKING CONFIG")
    cfgpath = "/var/user/cfg/device_supervisor/device_supervisor.cfg"
    credspath = "/var/user/files/creds.json"
    cfg = dict()
    with open(cfgpath, "r") as f:
        cfg = json.load(f)
        clouds = cfg.get("clouds")
        logger.info(clouds)
        #if not configured then try to configure from stored values
        if clouds[0]["args"]["clientId"] == "unknown" or clouds[0]["args"]["username"] == "unknown" or not clouds[0]["args"]["passwd"] or clouds[0]["args"]["passwd"] == "unknown":
            checkFileExist("creds.json")
            with open(credspath, "r") as c:
                creds = json.load(c)
                if creds:
                    logger.info("updating config with stored data")
                    clouds[0]["args"]["clientId"] = creds["clientId"]
                    clouds[0]["args"]["username"] = creds["userName"]
                    clouds[0]["args"]["passwd"] = creds["password"]
                    cfg["clouds"] = clouds
        try:
            cfg = json.load(f)
            clouds = cfg.get("clouds")
            logger.debug(clouds)
            #if not configured then try to configure from stored values
            if clouds[0]["args"]["clientId"] == "unknown" or clouds[0]["args"]["username"] == "unknown" or not clouds[0]["args"]["passwd"] or clouds[0]["args"]["passwd"] == "unknown":
                try:
                    checkFileExist("creds.json")
                except Exception as e:
                    logger.error(f"Error in checkFileExist: {e}")
                with open(credspath, "r") as c:
                    try:
                        creds = json.load(c)
                        if creds:
                            logger.debug("updating config with stored data")
                            clouds[0]["args"]["clientId"] = creds["clientId"]
                            clouds[0]["args"]["username"] = creds["userName"]
                            clouds[0]["args"]["passwd"] = creds["password"]
                            cfg["clouds"] = clouds
                            cfg = checkParameterConfig(cfg)
                            with open(cfgpath, "w", encoding='utf-8') as n:
                                json.dump(cfg, n, indent=1, ensure_ascii=False)
                            reboot()
                    except Exception as e:
                        logger.error(f"Error trying to load credentials from file: {e}")
            else:
                #assuming clouds is filled out, if data is different then assume someone typed in something new and store it, if creds is empty fill with clouds' data
                checkFileExist("creds.json")
                with open(credspath, "r") as c:
                    logger.debug("updating stored file with new data")
                    cfg = checkParameterConfig(cfg)
                    with open(cfgpath, "w", encoding='utf-8') as n:
                        json.dump(cfg, n, indent=1, ensure_ascii=False)
                    reboot()
        else:
            #assuming clouds is filled out, if data is different then assume someone typed in something new and store it, if creds is empty fill with clouds' data
            checkFileExist("creds.json")
            with open(credspath, "r") as c:
                logger.info("updating stored file with new data")
                cfg = checkParameterConfig(cfg)
                with open(cfgpath, "w", encoding='utf-8') as n:
                    json.dump(cfg, n, indent=1, ensure_ascii=False)
                creds = json.load(c)
                if creds:
                    if creds["clientId"] != clouds[0]["args"]["clientId"]:
                    creds = json.load(c)
                    if creds:
                        if creds["clientId"] != clouds[0]["args"]["clientId"]:
                            creds["clientId"] = clouds[0]["args"]["clientId"]
                        if creds["userName"] != clouds[0]["args"]["username"]:
                            creds["userName"] = clouds[0]["args"]["username"]
                        if creds["password"] != clouds[0]["args"]["passwd"]:
                            creds["password"] = clouds[0]["args"]["passwd"]
                    else:
                        creds["clientId"] = clouds[0]["args"]["clientId"]
                    if creds["userName"] != clouds[0]["args"]["username"]:
                        creds["userName"] = clouds[0]["args"]["username"]
                    if creds["password"] != clouds[0]["args"]["passwd"]:
                        creds["password"] = clouds[0]["args"]["passwd"]
                else:
                    creds["clientId"] = clouds[0]["args"]["clientId"]
                    creds["userName"] = clouds[0]["args"]["username"]
                    creds["password"] = clouds[0]["args"]["passwd"]
                with open(credspath, "w") as cw:
                    json.dump(creds,cw)
                with open(credspath, "w") as cw:
                    json.dump(creds,cw)
        except Exception as e:
            logger.error(f"Something went wrong in checkCredentialConfig: {e}")
def checkParameterConfig(cfg):
    logger.info("Checking Parameters!!!!")
    paramspath = "/var/user/files/params.json"
    cfgparams = convertDStoJSON(cfg.get("labels"))
    #check stored values
    checkFileExist("params.json")
    with open(paramspath, "r") as f:
        logger.info("Opened param storage file")
        params = json.load(f)
        if params:
            if cfgparams != params:
                #go through each param
                #if not "unknown" and cfg and params aren't the same take from cfg likely updated manually
                #if key in cfg but not in params copy to params
                logger.info("equalizing params between cfg and stored")
                for key in cfgparams.keys():
                    try:
                        if cfgparams[key] != params[key] and cfgparams[key] != "unknown":
    try:
        logger.debug("Checking Parameters!!!!")
        paramspath = "/var/user/files/params.json"
        cfgparams = convertDStoJSON(cfg.get("labels"))
        #check stored values
        checkFileExist("params.json")
        with open(paramspath, "r") as f:
            logger.debug("Opened param storage file")
            params = json.load(f)
            if params:
                if cfgparams != params:
                    #go through each param
                    #if not "unknown" and cfg and params aren't the same take from cfg likely updated manually
                    #if key in cfg but not in params copy to params
                    logger.debug("equalizing params between cfg and stored")
                    for key in cfgparams.keys():
                        try:
                            if cfgparams[key] != params[key] and cfgparams[key] != "unknown":
                                params[key] = cfgparams[key]
                    except:
                        params[key] = cfgparams[key]
                        except:
                            params[key] = cfgparams[key]
                cfg["labels"] = convertJSONtoDS(params)
                _set_global_args(convertJSONtoDS(params))
                    cfg["labels"] = convertJSONtoDS(params)
                    _set_global_args(convertJSONtoDS(params))
                with open(paramspath, "w") as p:
                    json.dump(params, p)
        else:
                    with open(paramspath, "w") as p:
                        json.dump(params, p)
            else:
            with open(paramspath, "w") as p:
                logger.info("initializing param file with params in memory")
                json.dump(convertDStoJSON(get_params()), p)
                cfg["labels"] = get_params()

    return cfg
                    logger.debug("initializing param file with params in memory")
                    json.dump(convertDStoJSON(get_params()), p)
                cfg["labels"] = get_params()

        return cfg
    except Exception as e:
        logger.error(f"Something went wrong in checkParameterConfig: {e}")
        os.system(f'rm {paramspath}')
        return cfg
payload = {}

def get_totalizers():
    try:
        with open("/var/user/files/totalizers.json", "r") as t:
            totalizers = json.load(t)
            if not totalizers:
                logger.info("-----INITIALIZING TOTALIZERS-----")
                totalizers = {
                    "day": 0,
                    "week": 0,
                    "month": 0,
                    "year": 0,
                    "lifetime": 0,
                    "dayHolding": 0,
                    "weekHolding": 0,
                    "monthHolding": 0,
                    "yearHolding": 0
                }
    except:
        totalizers = {
            "day": 0,
            "week": 0,
            "month": 0,
            "year": 0,
            "lifetime": 0,
            "dayHolding": 0,
            "weekHolding": 0,
            "monthHolding": 0,
            "yearHolding": 0
        }
    return totalizers

def saveTotalizers(totalizers):
    try:
        with open("/var/user/files/totalizers.json", "w") as t:
            json.dump(totalizers,t)
    except Exception as e:
        logger.error(e)

def getGPS():
    # Create a gps instance
@@ -140,25 +207,111 @@ def getGPS():
    logger.info(f"LATITUDE: {lat_dec}, LONGITUDE: {lon_dec}")
    speedKnots = position_status["speed"].split(" ")
    speedMPH = float(speedKnots[0]) * 1.151
    publish(__topic__, json.dumps({"ts": (round(dt.timestamp(dt.now())/600)*600)*1000, "values":{"latitude":f"{lat_dec:.8f}", "longitude":f"{lon_dec:.8f}", "speed": f"{speedMPH:.2f}"}}), __qos__)
    return (f"{lat_dec:.8f}",f"{lon_dec:.8f}",f"{speedMPH:.2f}")

def sendData(message,wizard_api):
    logger.debug(message)
    #publish(__topic__, json.dumps(message), __qos__)
def getFlowmeterData(address):
    try:
        #checkCredentialConfig()
        with CIPDriver('192.168.1.223') as fm:
        with CIPDriver(address) as fm:
            resp = fm.generic_message(service=b"\x0E", #get_single_attribute
                                      class_code=b"\x04", #assembly
                                      instance=b"\x64", #assembly 100 which is a 40byte array
                                      attribute=b"\x03", #similar to function code in modbus
                                      route_path=True)
            print("Bytes", resp[1])
            logger.info("Volume Flow", struct.unpack('f',resp[1][8:12])[0]) #volume flow is byte 9 - 12
            logger.info("Totalizer 1",struct.unpack('f',resp[1][28:32])[0]) #totalizer 1 is byte 29 - 32
            logger.info("Totalizer 2", struct.unpack('f',resp[1][32:36])[0]) #totalizer 2 is byte 33 - 36
            logger.info("Totalizer 3", struct.unpack('f',resp[1][36:40])[0]) #totalizer 3 is byte 37 - 40
            logger.debug("Bytes", resp[1])
            message = {}
            message["totalizer_1"] = struct.unpack('f',resp[1][28:32])[0] #totalizer 1 is byte 29 - 32
            message["totalizer_2"] = struct.unpack('f',resp[1][32:36])[0] #totalizer 2 is byte 33 - 36
            message["totalizer_3"] = struct.unpack('f',resp[1][36:40])[0] #totalizer 3 is byte 37 - 40
            message["flowrate"] = struct.unpack('f',resp[1][8:12])[0] #volume flow is byte 9 - 12
            return message
    except:
        logger.error("Could not get gps data!")
        logger.error("Could not get flowmeter data!")
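Note: getFlowmeterData reads CIP assembly instance 100 (a 40-byte array) and unpacks four 32-bit floats at fixed offsets. A self-contained sketch of just the decoding step, with the offsets taken from the comments above; the explicit '<' little-endian prefix is an assumption (the code uses native-order 'f'):

```python
import struct

def decode_assembly_100(buf: bytes) -> dict:
    # buf: the 40-byte payload returned as resp[1] by generic_message.
    return {
        "flowrate":    struct.unpack("<f", buf[8:12])[0],   # volume flow, bytes 9-12
        "totalizer_1": struct.unpack("<f", buf[28:32])[0],  # bytes 29-32
        "totalizer_2": struct.unpack("<f", buf[32:36])[0],  # bytes 33-36
        "totalizer_3": struct.unpack("<f", buf[36:40])[0],  # bytes 37-40
    }
```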
def sendData(message,wizard_api):
    logger.debug(message)
    checkCredentialConfig()
    payload = {"ts": (round(dt.timestamp(dt.now())/600)*600)*1000, "values": {}}
    resetPayload = {"ts": "", "values": {}}
    dayReset, weekReset, monthReset, yearReset = False, False, False, False
    message = getFlowmeterData('192.168.1.91') #TODO set from parameters
    for measure in message.keys():
        try:
            if measure in ["totalizer_1"]:
                payload["values"]["day_volume"], dayReset = totalizeDay(message[measure])
                payload["values"]["week_volume"], weekReset = totalizeWeek(message[measure])
                payload["values"]["month_volume"], monthReset = totalizeMonth(message[measure])
                payload["values"]["year_volume"], yearReset = totalizeYear(message[measure])
            payload["values"][measure] = message[measure]
        except Exception as e:
            logger.error(e)
    try:
        payload["values"]["latitude"], payload["values"]["longitude"], payload["values"]["speed"] = getGPS()
    except:
        logger.error("Could not get GPS coordinates")
    publish(__topic__, json.dumps(payload), __qos__)

    if dayReset:
        resetPayload["values"]["yesterday_volume"] = payload["values"]["day_volume"]
        resetPayload["values"]["day_volume"] = 0
    if weekReset:
        resetPayload["values"]["last_week_volume"] = payload["values"]["week_volume"]
        resetPayload["values"]["week_volume"] = 0
    if monthReset:
        resetPayload["values"]["last_month_volume"] = payload["values"]["month_volume"]
        resetPayload["values"]["month_volume"] = 0
    if yearReset:
        resetPayload["values"]["last_year_volume"] = payload["values"]["year_volume"]
        resetPayload["values"]["year_volume"] = 0

    if resetPayload["values"]:
        resetPayload["ts"] = 1 + (round(dt.timestamp(dt.now())/600)*600)*1000
        publish(__topic__, json.dumps(resetPayload), __qos__)
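Note: the ts expression used throughout buckets the current time to the nearest 10-minute boundary, in milliseconds; spelled out:

```python
from datetime import datetime as dt

now_s = dt.timestamp(dt.now())       # seconds since the epoch
bucket_s = round(now_s / 600) * 600  # nearest 600 s = 10-minute boundary
ts_ms = bucket_s * 1000              # ThingsBoard timestamps are in milliseconds
# resetPayload uses ts_ms + 1 so the reset sample sorts just after the report
```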
def totalizeDay(lifetime):
    totalizers = get_totalizers()
    now = dt.fromtimestamp(round(dt.timestamp(dt.now())/600)*600)
    reset = False
    value = lifetime - totalizers["dayHolding"]
    if not int(now.strftime("%d")) == int(totalizers["day"]):
        totalizers["dayHolding"] = lifetime
        totalizers["day"] = int(now.strftime("%d"))
        saveTotalizers(totalizers)
        reset = True
    return (value,reset)

def totalizeWeek(lifetime):
    totalizers = get_totalizers()
    now = dt.fromtimestamp(round(dt.timestamp(dt.now())/600)*600)
    reset = False
    value = lifetime - totalizers["weekHolding"]
    if (not now.strftime("%U") == totalizers["week"] and now.strftime("%a") == "Sun") or totalizers["week"] == 0:
        totalizers["weekHolding"] = lifetime
        totalizers["week"] = now.strftime("%U")
        saveTotalizers(totalizers)
        reset = True
    return (value, reset)

def totalizeMonth(lifetime):
    totalizers = get_totalizers()
    now = dt.fromtimestamp(round(dt.timestamp(dt.now())/600)*600)
    reset = False
    value = lifetime - totalizers["monthHolding"]
    if not int(now.strftime("%m")) == int(totalizers["month"]):
        totalizers["monthHolding"] = lifetime
        totalizers["month"] = now.strftime("%m")
        saveTotalizers(totalizers)
        reset = True
    return (value,reset)

def totalizeYear(lifetime):
    totalizers = get_totalizers()
    now = dt.fromtimestamp(round(dt.timestamp(dt.now())/600)*600)
    reset = False
    value = lifetime - totalizers["yearHolding"]
    if not int(now.strftime("%Y")) == int(totalizers["year"]):
        totalizers["yearHolding"] = lifetime
        totalizers["year"] = now.strftime("%Y")
        saveTotalizers(totalizers)
        reset = True
    return (value, reset)
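Note: the four totalize* functions share one pattern — report the delta between the lifetime reading and a latched holding value, and re-latch when the period identifier rolls over (totalizeWeek additionally guards on Sunday). A compact sketch of the shared pattern (hypothetical helper, not in this commit):

```python
def totalize(lifetime, totalizers, period_key, holding_key, current_period):
    # Delta accumulated since the last latch.
    value = lifetime - totalizers[holding_key]
    reset = str(totalizers[period_key]) != str(current_period)
    if reset:
        totalizers[holding_key] = lifetime       # latch the lifetime reading
        totalizers[period_key] = current_period  # remember the new period
        saveTotalizers(totalizers)
    return value, reset
```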
6643  Pub_Sub/gateway/ratliff_gateway.cfg  Normal file
File diff suppressed because it is too large
336  Pub_Sub/hrflowskid/thingsboard/v1/hrflowskid.cfg  Normal file
File diff suppressed because one or more lines are too long
5  Pub_Sub/hrflowskid/thingsboard/v1/hrflowskid.csv  Normal file
@@ -0,0 +1,5 @@
MeasuringPointName,ControllerName,GroupName,UploadType,DeadZonePercent,DataType,ArrayIndex,EnableBit,BitIndex,reverseBit,Address,Decimal,Len,ReadWrite,Unit,Description,Transform Type,MaxValue,MinValue,MaxScale,MinScale,Gain,Offset,startBit,endBit,Pt,Ct,Mapping_table,TransDecimal,bitMap,msecSample,storageLwTSDB,DataEndianReverse,ReadOffset,ReadLength,WriteOffset,WriteLength,DataParseMethod,BitId
pressure_1,hrflowskid,default,periodic,,FLOAT,,,,,Scaled_PSI,2,,ro,,,none,,,,,,,,,,,,,,,0,,,,,,,
flowrate_1,hrflowskid,default,periodic,,FLOAT,,,,,Scaled_Flow_Rate,2,,ro,,,none,,,,,,,,,,,,,,,0,,,,,,,
total_1_lifetime,hrflowskid,default,periodic,,FLOAT,,,,,Scaled_Lifetime_Flow_Rate,2,,ro,,,none,,,,,,,,,,,,,,,0,,,,,,,
valve_1_state,hrflowskid,default,periodic,,FLOAT,,,,,Scaled_Valve_FB,2,,ro,,,none,,,,,,,,,,,,,,,0,,,,,,,
11  Pub_Sub/plcfreshwater/thingsboard/conocophillips.csv  Normal file
@@ -0,0 +1,11 @@
MeasuringPointName,ControllerName,GroupName,UploadType,DataType,Address,Decimal,Len,ReadWrite,Unit,Description,Transform Type,MaxValue,MinValue,MaxScale,MinScale,Gain,Offset,startBit,endBit
scaled_flow_meter,Ratliff_Well_31,default,periodic,FLOAT,Scaled_Flow_Meter,2,1,ro,,,none,,,,,,,,
scaled_pressure_transducer,Ratliff_Well_31,default,periodic,FLOAT,Scaled_Pressure_Transducer,2,1,ro,,,none,,,,,,,,
raw_hand_input,Ratliff_Well_31,default,periodic,BIT,Raw_Hand_Input,2,1,ro,,,none,,,,,,,,
raw_auto_input,Ratliff_Well_31,default,periodic,BIT,Raw_Auto_Input,2,1,ro,,,none,,,,,,,,
raw_run_status,Ratliff_Well_31,default,periodic,BIT,Raw_Run_Status,2,1,ro,,,none,,,,,,,,
raw_local_start,Ratliff_Well_31,default,periodic,BIT,Raw_Local_Start,2,1,ro,,,none,,,,,,,,
lifetime_flow_meter_gal,Ratliff_Well_31,default,periodic,FLOAT,Lifetime_Flow_Meter_Gal,2,1,ro,,,none,,,,,,,,
spt_flow_meter_unit,Ratliff_Well_31,default,periodic,BIT,SPT_Flow_Meter_Unit,2,1,ro,,,none,,,,,,,,
raw_overload_status,Ratliff_Well_31,default,periodic,BIT,Raw_Overload_Status,2,1,ro,,,none,,,,,,,,
CMD_Cloud_Control,Ratliff_Well_31,default,periodic,BIT,CMD_Cloud_Control,2,1,rw,,,none,,,,,,,,
File diff suppressed because one or more lines are too long
161  Pub_Sub/plcfreshwater_advvfdipp/thingsboard/v2/pub/sendData.py  Normal file
@@ -0,0 +1,161 @@
import json, os
from common.Logger import logger
from quickfaas.remotebus import publish
from quickfaas.global_dict import get as get_params
from quickfaas.global_dict import _set_global_args
from datetime import datetime as dt

def reboot(reason="Rebooting for config file update"):
    #basic = Basic()
    logger.info("!" * 10 + "REBOOTING DEVICE" + "!"*10)
    logger.info(reason)
    r = os.popen("kill -s SIGHUP `cat /var/run/python/supervisord.pid`").read()
    logger.info(f"REBOOT : {r}")

def checkFileExist(filename):
    path = "/var/user/files"
    try:
        if not os.path.exists(path):
            logger.debug("no folder making files folder in var/user")
            os.makedirs(path)
            with open(path + "/" + filename, "a") as f:
                json.dump({}, f)
    except Exception as e:
        logger.error(f"Something went wrong in checkFileExist while making folder: {e}")

    try:
        if not os.path.exists(path + "/" + filename):
            logger.debug("no creds file making creds file")
            with open(path + "/" + filename, "a") as f:
                json.dump({}, f)
    except Exception as e:
        logger.error(f"Something went wrong in checkFileExist while making file: {e}")

def convertDStoJSON(ds):
    j = dict()
    try:
        for x in ds:
            j[x["key"]] = x["value"]
    except Exception as e:
        logger.error(f"Something went wrong in convertDStoJSON: {e}")
    return j

def convertJSONtoDS(j):
    d = []
    try:
        for key in j.keys():
            d.append({"key": key, "value": j[key]})
    except Exception as e:
        logger.error(f"Something went wrong in convertJSONtoDS: {e}")
    return d

def checkCredentialConfig():
    logger.debug("CHECKING CONFIG")
    cfgpath = "/var/user/cfg/device_supervisor/device_supervisor.cfg"
    credspath = "/var/user/files/creds.json"
    cfg = dict()
    with open(cfgpath, "r") as f:
        try:
            cfg = json.load(f)
            clouds = cfg.get("clouds")
            logger.debug(clouds)
            #if not configured then try to configure from stored values
            if clouds[0]["args"]["clientId"] == "unknown" or clouds[0]["args"]["username"] == "unknown" or not clouds[0]["args"]["passwd"] or clouds[0]["args"]["passwd"] == "unknown":
                try:
                    checkFileExist("creds.json")
                except Exception as e:
                    logger.error(f"Error in checkFileExist: {e}")
                with open(credspath, "r") as c:
                    try:
                        creds = json.load(c)
                        if creds:
                            logger.debug("updating config with stored data")
                            clouds[0]["args"]["clientId"] = creds["clientId"]
                            clouds[0]["args"]["username"] = creds["userName"]
                            clouds[0]["args"]["passwd"] = creds["password"]
                            cfg["clouds"] = clouds
                            cfg = checkParameterConfig(cfg)
                            with open(cfgpath, "w", encoding='utf-8') as n:
                                json.dump(cfg, n, indent=1, ensure_ascii=False)
                            reboot()
                    except Exception as e:
                        logger.error(f"Error trying to load credentials from file: {e}")
            else:
                #assuming clouds is filled out, if data is different then assume someone typed in something new and store it, if creds is empty fill with clouds' data
                checkFileExist("creds.json")
                with open(credspath, "r") as c:
                    logger.debug("updating stored file with new data")
                    cfg = checkParameterConfig(cfg)
                    with open(cfgpath, "w", encoding='utf-8') as n:
                        json.dump(cfg, n, indent=1, ensure_ascii=False)
                    creds = json.load(c)
                    if creds:
                        if creds["clientId"] != clouds[0]["args"]["clientId"]:
                            creds["clientId"] = clouds[0]["args"]["clientId"]
                        if creds["userName"] != clouds[0]["args"]["username"]:
                            creds["userName"] = clouds[0]["args"]["username"]
                        if creds["password"] != clouds[0]["args"]["passwd"]:
                            creds["password"] = clouds[0]["args"]["passwd"]
                    else:
                        creds["clientId"] = clouds[0]["args"]["clientId"]
                        creds["userName"] = clouds[0]["args"]["username"]
                        creds["password"] = clouds[0]["args"]["passwd"]
                    with open(credspath, "w") as cw:
                        json.dump(creds,cw)
        except Exception as e:
            logger.error(f"Something went wrong in checkCredentialConfig: {e}")

def checkParameterConfig(cfg):
    try:
        logger.debug("Checking Parameters!!!!")
        paramspath = "/var/user/files/params.json"
        cfgparams = convertDStoJSON(cfg.get("labels"))
        #check stored values
        checkFileExist("params.json")
        with open(paramspath, "r") as f:
            logger.debug("Opened param storage file")
            params = json.load(f)
            if params:
                if cfgparams != params:
                    #go through each param
                    #if not "unknown" and cfg and params aren't the same take from cfg likely updated manually
                    #if key in cfg but not in params copy to params
                    logger.debug("equalizing params between cfg and stored")
                    for key in cfgparams.keys():
                        try:
                            if cfgparams[key] != params[key] and cfgparams[key] != "unknown":
                                params[key] = cfgparams[key]
                        except:
                            params[key] = cfgparams[key]
                    cfg["labels"] = convertJSONtoDS(params)
                    _set_global_args(convertJSONtoDS(params))
                    with open(paramspath, "w") as p:
                        json.dump(params, p)
            else:
                with open(paramspath, "w") as p:
                    logger.debug("initializing param file with params in memory")
                    json.dump(convertDStoJSON(get_params()), p)
                cfg["labels"] = get_params()

        return cfg
    except Exception as e:
        logger.error(f"Something went wrong in checkParameterConfig: {e}")
        os.system(f'rm {paramspath}')
        return cfg

def sendData(message):
    payload = {}
    payload["ts"] = (round(dt.timestamp(dt.now())/600)*600)*1000
    payload["values"] = {}
    try:
        checkCredentialConfig()
    except Exception as e:
        logger.error(e)
    for measure in message["measures"]:
        try:
            logger.debug(measure)
            payload["values"][measure["name"]] = measure["value"]
        except Exception as e:
            logger.error(e)
    publish(__topic__, json.dumps(payload), __qos__)
    publish("v1/devices/me/attributes", json.dumps({"latestReportTime": (round(dt.timestamp(dt.now())/600)*600)*1000}), __qos__)
BIN  Pub_Sub/rigpump/.DS_Store (vendored)
Binary file not shown.