created leak detection
This commit is contained in:
@@ -54,6 +54,12 @@ gas_vru_temp,Peacemaker_Facility,default,periodic,,,FLOAT,,,,,Val_Gas_VRU_Temp,2
|
||||
gas_vru_today_total,Peacemaker_Facility,default,periodic,,,FLOAT,,,,,Val_Gas_VRU_Today,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0,
|
||||
gas_vru_yesterday_total,Peacemaker_Facility,default,periodic,,,FLOAT,,,,,Val_Gas_VRU_Yesterday,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0,
|
||||
gwr_reset_cmd,Peacemaker_Facility,default,periodic,,,BIT,,,,0,CMD_GWR_Reset,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0,
|
||||
heater_water_flow_rate,Peacemaker_Facility,default,periodic,,,FLOAT,,,,,Val_Heater_Water_FR,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0,
|
||||
heater_water_last_month_total,Peacemaker_Facility,default,periodic,,,FLOAT,,,,,Val_Heater_Water_LM,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0,
|
||||
heater_water_month_total,Peacemaker_Facility,default,periodic,,,FLOAT,,,,,Val_Heater_Water_Month,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0,
|
||||
heater_water_t1,Peacemaker_Facility,default,periodic,,,FLOAT,,,,,Val_Heater_Water_T1,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0,
|
||||
heater_water_todays_total,Peacemaker_Facility,default,periodic,,,FLOAT,,,,,Val_Heater_Water_Todays,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0,
|
||||
heater_water_yesterday_total,Peacemaker_Facility,default,periodic,,,FLOAT,,,,,Val_Heater_Water_Yest,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0,
|
||||
lact_2_bsw,Peacemaker_Facility,default,periodic,,,FLOAT,,,,,Val_Lact_2_Meter_BSW,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0,
|
||||
lact_2_density,Peacemaker_Facility,default,periodic,,,FLOAT,,,,,Val_Lact_2_Meter_Density,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0,
|
||||
lact_2_fault_alm,Peacemaker_Facility,default,periodic,,,BIT,,,,0,AL0_Lact_Unit_2_Faulted,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0,
|
||||
|
||||
|
529
Pub_Sub/cameratrailer_mb/thingsboard/cameratrailer_tb_v8.cfg
Normal file
529
Pub_Sub/cameratrailer_mb/thingsboard/cameratrailer_tb_v8.cfg
Normal file
File diff suppressed because one or more lines are too long
99
Pub_Sub/fk_leak_detection/thingsboard/leak_detection.csv
Normal file
99
Pub_Sub/fk_leak_detection/thingsboard/leak_detection.csv
Normal file
@@ -0,0 +1,99 @@
|
||||
MeasuringPointName,ControllerName,GroupName,UploadType,DeadZoneType,DeadZonePercent,DataType,ArrayIndex,EnableBit,BitIndex,reverseBit,Address,Decimal,Len,CodeType,ReadWrite,Unit,Description,Transform Type,MaxValue,MinValue,MaxScale,MinScale,Gain,Offset,startBit,endBit,Pt,Ct,Mapping_table,TransDecimal,bitMap,msecSample,storageLwTSDB,DataEndianReverse,ReadOffset,ReadLength,WriteOffset,WriteLength,DataParseMethod,BitId,pollCycle,EnableRequestCount,RequestCount,,
|
||||
leak_1_0ft_volume_total,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_1_0FT_Volume_Total,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_10ft_volume_total,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_1_10FT_Volume_Total,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_11ft_volume_total,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_1_11FT_Volume_Total,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_12ft_volume_total,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_1_12FT_Volume_Total,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_13ft_volume_total,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_1_13FT_Volume_Total,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_14ft_volume_total,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_1_14FT_Volume_Total,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_15ft_volume_total,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_1_15FT_Volume_Total,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_16ft_volume_total,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_1_16FT_Volume_Total,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_1ft_volume_total,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_1_1FT_Volume_Total,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_2ft_volume_total,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_1_2FT_Volume_Total,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_3ft_volume_total,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_1_3FT_Volume_Total,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_4ft_volume_total,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_1_4FT_Volume_Total,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_5ft_volume_total,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_1_5FT_Volume_Total,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_6ft_volume_total,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_1_6FT_Volume_Total,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_7ft_volume_total,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_1_7FT_Volume_Total,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_8ft_volume_total,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_1_8FT_Volume_Total,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_9ft_volume_total,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_1_9FT_Volume_Total,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_cu_ft,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_1_CU_Ft,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_cubic_feet_to_barrels,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_1_Cubic_Feet_To_Barrels,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_hi_alm,leak_detection,fastReport,periodic,,,BIT,,0,0,0,Leak_1_Hi_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_hi_alm_enable,leak_detection,fastReport,periodic,,,BIT,,0,0,0,Leak_1_Hi_Alarm_Enable,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_hi_alm_enabled,leak_detection,fastReport,periodic,,,BIT,,0,0,0,Leak_1_Hi_Alarm_Enabled,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_hi_clr_spt,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_1_Hi_Clr_Setpoint,2,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_hi_reset,leak_detection,fastReport,periodic,,,BIT,,0,0,0,Leak_1_Hi_Reset,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_hi_set,leak_detection,fastReport,periodic,,,BIT,,0,0,0,Leak_1_Hi_Set,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_hi_spt,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_1_Hi_Setpoint,2,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_level,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_1_Lev,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_level_psi,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_1_Lev_Psi,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_lo_alm,leak_detection,fastReport,periodic,,,BIT,,0,0,0,Leak_1_Lo_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_lo_clr_spt,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_1_Lo_Clr_Setpoint,2,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_lo_reset,leak_detection,fastReport,periodic,,,BIT,,0,0,0,Leak_1_Lo_Reset,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_lo_set,leak_detection,fastReport,periodic,,,BIT,,0,0,0,Leak_1_Lo_Set,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_lo_spt,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_1_Lo_Setpoint,2,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_pump_off_spt,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_1_Pump_Off_Spt,2,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_pump_on_spt,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_1_Pump_On_Spt,2,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_raw_max,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_1_Raw_Max,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_raw_min,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_1_Raw_Min,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_rise_multiplier,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_1_Rise_Multiplier,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_scaled_max,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_1_ScaledMax,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_scaled_min,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_1_ScaledMin,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_stage_total,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_1_Stage_Total,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_total_barrels,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_1_Total_Barrels,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_2_0ft_volume_total,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_2_0FT_Volume_Total,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_2_10ft_volume_total,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_2_10FT_Volume_Total,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_2_11ft_volume_total,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_2_11FT_Volume_Total,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_2_12ft_volume_total,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_2_12FT_Volume_Total,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_2_13ft_volume_total,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_2_13FT_Volume_Total,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_2_14ft_volume_total,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_2_14FT_Volume_Total,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_2_15ft_volume_total,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_2_15FT_Volume_Total,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_2_16ft_volume_total,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_2_16FT_Volume_Total,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_2_1ft_volume_total,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_2_1FT_Volume_Total,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_2_2ft_volume_total,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_2_2FT_Volume_Total,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_2_3ft_volume_total,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_2_3FT_Volume_Total,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_2_4ft_volume_total,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_2_4FT_Volume_Total,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_2_5ft_volume_total,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_2_5FT_Volume_Total,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_2_6ft_volume_total,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_2_6FT_Volume_Total,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_2_7ft_volume_total,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_2_7FT_Volume_Total,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_2_8ft_volume_total,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_2_8FT_Volume_Total,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_2_9ft_volume_total,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_2_9FT_Volume_Total,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_2_cu_ft,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_2_CU_Ft,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_2_cubic_feet_to_barrels,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_2_Cubic_Feet_To_Barrels,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_2_hi_alm,leak_detection,fastReport,periodic,,,BIT,,0,0,0,Leak_2_Hi_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_2_hi_clr_spt,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_2_Hi_Clr_Setpoint,2,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_2_hi_reset,leak_detection,fastReport,periodic,,,BIT,,0,0,0,Leak_2_Hi_Reset,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_2_hi_set,leak_detection,fastReport,periodic,,,BIT,,0,0,0,Leak_2_Hi_Set,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_2_hi_spt,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_2_Hi_Setpoint,2,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_2_level,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_2_Lev,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_2_level_psi,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_2_Lev_Psi,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_2_lo_alm,leak_detection,fastReport,periodic,,,BIT,,0,0,0,Leak_2_Lo_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_2_lo_clr_spt,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_2_Lo_Clr_Setpoint,2,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_2_lo_reset,leak_detection,fastReport,periodic,,,BIT,,0,0,0,Leak_2_Lo_Reset,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_2_lo_set,leak_detection,fastReport,periodic,,,BIT,,0,0,0,Leak_2_Lo_Set,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_2_lo_spt,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_2_Lo_Setpoint,2,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_2_pump_off_spt,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_2_Pump_Off_Spt,2,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_2_pump_on_spt,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_2_Pump_On_Spt,2,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_2_raw_max,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_2_Raw_Max,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_2_raw_min,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_2_Raw_Min,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_2_rise_multiplier,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_2_Rise_Multiplier,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_2_scaled_max,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_2_ScaledMax,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_2_scaled_min,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_2_ScaledMin,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_2_total_barrels,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_2_Total_Barrels,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
p001_auto,leak_detection,fastReport,periodic,,,BIT,,0,0,0,P001_AUTO_FBK,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
p001_hand,leak_detection,fastReport,periodic,,,BIT,,0,0,0,P001_HAND_FBK,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
p001_overload_alm,leak_detection,fastReport,periodic,,,BIT,,0,0,0,P001_OVERLOAD_ALARM,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
p001_overload,leak_detection,fastReport,periodic,,,BIT,,0,0,0,P001_OVERLOAD_FBK,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
p001_run_cmd,leak_detection,fastReport,periodic,,,BIT,,0,0,0,P001_RUN_CMD,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
p001_run_fail_alm,leak_detection,fastReport,periodic,,,BIT,,0,0,0,P001_RUN_FAIL_ALARM,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
p001_run,leak_detection,fastReport,periodic,,,BIT,,0,0,0,P001_RUN_FBK,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
p001_start,leak_detection,fastReport,periodic,,,BIT,,0,0,0,P001_START_BTN_FBK,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
p002_auto,leak_detection,fastReport,periodic,,,BIT,,0,0,0,P002_AUTO_FBK,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
p002_hand,leak_detection,fastReport,periodic,,,BIT,,0,0,0,P002_HAND_FBK,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
p002_overload_alm,leak_detection,fastReport,periodic,,,BIT,,0,0,0,P002_OVERLOAD_ALARM,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
p002_overload,leak_detection,fastReport,periodic,,,BIT,,0,0,0,P002_OVERLOAD_FBK,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
p002_run_cmd,leak_detection,fastReport,periodic,,,BIT,,0,0,0,P002_RUN_CMD,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
p002_run_fail_alm,leak_detection,fastReport,periodic,,,BIT,,0,0,0,P002_RUN_FAIL_ALARM,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
p002_run,leak_detection,fastReport,periodic,,,BIT,,0,0,0,P002_RUN_FBK,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
p002_start,leak_detection,fastReport,periodic,,,BIT,,0,0,0,P002_START_BTN_FBK,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
reset,leak_detection,fastReport,periodic,,,BIT,,0,0,0,RESET,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
|
276
Pub_Sub/fk_leak_detection/thingsboard/pub/sendData-bak.py
Normal file
276
Pub_Sub/fk_leak_detection/thingsboard/pub/sendData-bak.py
Normal file
@@ -0,0 +1,276 @@
|
||||
import json
|
||||
import os
|
||||
import time
|
||||
from datetime import datetime as dt
|
||||
from common.Logger import logger
|
||||
from quickfaas.remotebus import publish
|
||||
from quickfaas.global_dict import get as get_params
|
||||
from quickfaas.global_dict import _set_global_args
|
||||
from mobiuspi_lib.gps import GPS
|
||||
|
||||
|
||||
def reboot():
    """Restart the device runtime by signalling supervisord with SIGHUP."""
    logger.info("!" * 10 + "REBOOTING DEVICE" + "!" * 10)
    # supervisord re-reads its config and restarts managed processes on SIGHUP.
    output = os.popen("kill -s SIGHUP `cat /var/run/python/supervisord.pid`").read()
    logger.info(f"REBOOT : {output}")
|
||||
|
||||
|
||||
def checkFileExist(filename):
    """Ensure /var/user/files/<filename> exists, seeded with an empty JSON object.

    Creates the storage folder first if it is missing.  Fixes the original,
    which opened the new file in append mode ("a") and duplicated the
    file-creation logic in both branches; appending "{}" to a file that
    already had partial content would corrupt it.
    """
    path = "/var/user/files"
    filepath = os.path.join(path, filename)
    if not os.path.exists(path):
        logger.info("no folder making files folder in var/user")
        # exist_ok guards against a race with another process creating it.
        os.makedirs(path, exist_ok=True)
    if not os.path.exists(filepath):
        logger.info("no creds file making creds file")
        with open(filepath, "w") as f:
            json.dump({}, f)
|
||||
|
||||
|
||||
def convertDStoJSON(ds):
    """Collapse a list of {"key": k, "value": v} records into a plain dict."""
    return {record["key"]: record["value"] for record in ds}
|
||||
|
||||
|
||||
def convertJSONtoDS(j):
    """Expand a plain dict into a list of {"key": k, "value": v} records."""
    return [{"key": name, "value": j[name]} for name in j]
|
||||
|
||||
|
||||
def checkCredentialConfig():
    """Keep the supervisor cloud credentials and the creds.json backup in sync.

    If the running config still holds placeholder ("unknown") credentials,
    restore them from the stored backup and reboot so they take effect.
    Otherwise the running config is treated as authoritative: parameters are
    reconciled and its credential values are pushed into the stored backup.
    """
    logger.info("CHECKING CONFIG")
    cfgpath = "/var/user/cfg/device_supervisor/device_supervisor.cfg"
    credspath = "/var/user/files/creds.json"
    with open(cfgpath, "r") as f:
        cfg = json.load(f)
    clouds = cfg.get("clouds")
    logger.info(clouds)
    # Alias the first cloud's connection arguments; mutating it mutates cfg.
    args = clouds[0]["args"]
    unconfigured = (
        args["clientId"] == "unknown"
        or args["username"] == "unknown"
        or not args["passwd"]
        or args["passwd"] == "unknown"
    )
    if unconfigured:
        # Not provisioned yet: try to restore credentials from the backup file.
        checkFileExist("creds.json")
        with open(credspath, "r") as c:
            creds = json.load(c)
            if creds:
                logger.info("updating config with stored data")
                args["clientId"] = creds["clientId"]
                args["username"] = creds["userName"]
                args["passwd"] = creds["password"]
                cfg["clouds"] = clouds
                cfg = checkParameterConfig(cfg)
                with open(cfgpath, "w", encoding='utf-8') as n:
                    json.dump(cfg, n, indent=1, ensure_ascii=False)
                # Restart so the supervisor picks up the restored credentials.
                reboot()
    else:
        # Provisioned: assume any differing values were typed in manually,
        # so mirror the live config into the stored backup.
        checkFileExist("creds.json")
        with open(credspath, "r") as c:
            logger.info("updating stored file with new data")
            cfg = checkParameterConfig(cfg)
            with open(cfgpath, "w", encoding='utf-8') as n:
                json.dump(cfg, n, indent=1, ensure_ascii=False)
            creds = json.load(c)
            if creds:
                if creds["clientId"] != args["clientId"]:
                    creds["clientId"] = args["clientId"]
                if creds["userName"] != args["username"]:
                    creds["userName"] = args["username"]
                if creds["password"] != args["passwd"]:
                    creds["password"] = args["passwd"]
            else:
                creds["clientId"] = args["clientId"]
                creds["userName"] = args["username"]
                creds["password"] = args["passwd"]
            with open(credspath, "w") as cw:
                json.dump(creds, cw)
|
||||
|
||||
|
||||
def checkParameterConfig(cfg):
    """Reconcile cfg["labels"] parameters with the params.json backup file.

    Returns *cfg*, possibly with its "labels" replaced by the merged set.
    Non-"unknown" values from the live config win over stored values; keys
    missing from the stored file are copied in.  An empty stored file is
    initialized from the parameters currently in memory.
    """
    logger.info("Checking Parameters!!!!")
    paramspath = "/var/user/files/params.json"
    cfgparams = convertDStoJSON(cfg.get("labels"))
    # Make sure the backup file exists before reading it.
    checkFileExist("params.json")
    with open(paramspath, "r") as f:
        logger.info("Opened param storage file")
        params = json.load(f)
        if params:
            if cfgparams != params:
                logger.info("equalizing params between cfg and stored")
                for key in cfgparams.keys():
                    try:
                        # Live config wins unless it is the "unknown" placeholder.
                        if cfgparams[key] != params[key] and cfgparams[key] != "unknown":
                            params[key] = cfgparams[key]
                    except:
                        # Key absent from the stored set: copy it over.
                        params[key] = cfgparams[key]
                merged = convertJSONtoDS(params)
                cfg["labels"] = merged
                _set_global_args(merged)
                with open(paramspath, "w") as p:
                    json.dump(params, p)
        else:
            with open(paramspath, "w") as p:
                logger.info("initializing param file with params in memory")
                json.dump(convertDStoJSON(get_params()), p)
            cfg["labels"] = get_params()

    return cfg
|
||||
|
||||
|
||||
def getGPS():
    """Read the modem GPS fix and return (latitude, longitude, speed_mph).

    All three values are returned as formatted strings; latitude/longitude
    are signed decimal degrees (negative for S/W hemispheres).
    """
    gps = GPS()
    position_status = gps.get_position_status()
    logger.debug("position_status: ")
    logger.debug(position_status)
    # Fields look like "DDd MM.MMMM' H" -- degrees, minutes, hemisphere.
    # TODO(review): confirm exact field format against the GPS library docs.
    lat_parts = position_status["latitude"].split(" ")
    lon_parts = position_status["longitude"].split(" ")
    lat_dec = int(lat_parts[0][:-1]) + (float(lat_parts[1][:-1]) / 60)
    lon_dec = int(lon_parts[0][:-1]) + (float(lon_parts[1][:-1]) / 60)
    if lat_parts[2] == "S":
        lat_dec = -lat_dec
    if lon_parts[2] == "W":
        lon_dec = -lon_dec
    logger.info("HERE IS THE GPS COORDS")
    logger.info(f"LATITUDE: {lat_dec}, LONGITUDE: {lon_dec}")
    knots = position_status["speed"].split(" ")
    # 1 knot = 1.151 mph.
    speedMPH = float(knots[0]) * 1.151
    return (f"{lat_dec:.8f}", f"{lon_dec:.8f}", f"{speedMPH:.2f}")
|
||||
|
||||
|
||||
def chunk_payload(payload, chunk_size=20):
    """Yield *payload* split into pieces of at most *chunk_size* values.

    Supports two shapes: {"ts": ..., "values": {...}} (each chunk keeps the
    same "ts" and carries a slice of "values"), or a flat key/value dict
    (each chunk is a slice of the dict itself).
    """
    if "values" in payload:
        # Original format: {"ts": ..., "values": {...}}
        pairs = list(payload["values"].items())
        stamp = payload["ts"]
        for start in range(0, len(pairs), chunk_size):
            yield {"ts": stamp, "values": dict(pairs[start:start + chunk_size])}
    else:
        # New format: {"key1": "value1", "key2": "value2"}
        names = list(payload.keys())
        for start in range(0, len(names), chunk_size):
            yield {name: payload[name] for name in names[start:start + chunk_size]}
|
||||
|
||||
|
||||
def chunk_payload_devices(payload, chunk_size=20, is_attributes_payload=False):
    """Yield a per-controller gateway payload split into chunks.

    With is_attributes_payload=True the controllers themselves are chunked;
    otherwise each controller entry's "values" dict is chunked, emitting one
    single-controller payload per chunk.
    """
    if is_attributes_payload:
        # For attributes payload, chunk the controllers.
        pairs = list(payload.items())
        for start in range(0, len(pairs), chunk_size):
            yield dict(pairs[start:start + chunk_size])
        return
    # For data payload, chunk the values within each controller.
    for controller, entries in payload.items():
        for entry in entries:
            stamp = entry['ts']
            pairs = list(entry['values'].items())
            for start in range(0, len(pairs), chunk_size):
                piece = dict(pairs[start:start + chunk_size])
                yield {controller: [{"ts": stamp, "values": piece}]}
|
||||
|
||||
|
||||
def controlName(name):
    """Map an internal controller name to its configured device name.

    Known controllers ("overflow_pump", "leak_detection") resolve to the
    device name stored in the gateway parameters; anything else resolves to
    "Gateway" (published on the gateway's own topics).

    Fix: the original built the whole name map eagerly with params['...']
    lookups, so a single missing parameter raised KeyError for every call,
    even for names that map to "Gateway".  A known controller whose
    parameter is missing now also falls back to "Gateway".
    """
    logger.debug(name)
    params = convertDStoJSON(get_params())
    logger.debug(params)
    # Only these controllers are published as separate gateway devices.
    if name in ("overflow_pump", "leak_detection") and name in params:
        return f"{params[name]}"
    return "Gateway"
|
||||
|
||||
|
||||
def sendData(message):
    """Publish a device-supervisor measurement batch to ThingsBoard.

    Gateway-level points go to ``__topic__`` (telemetry) and
    ``v1/devices/me/attributes``; points belonging to named controllers are
    grouped per controller and sent through ``v1/gateway/telemetry`` /
    ``v1/gateway/attributes``.  Points whose name contains "_spt"
    (setpoints) are sent as attributes, everything else as telemetry.
    Unhealthy points are dropped.

    Fix: the final loop previously published the entire
    ``attributes_payload_devices`` dict on every iteration instead of the
    current chunk, defeating the chunking it iterated over.
    """
    grouped_data = {}
    grouped_attributes = {}
    # Timestamp rounded to the nearest 10 minutes, in milliseconds.
    now = (round(dt.timestamp(dt.now()) / 600) * 600) * 1000
    payload = {"ts": now, "values": {}}
    attributes_payload = {}
    logger.debug(message)
    for measure in message["measures"]:
        try:
            logger.debug(measure)
            ctrlName = controlName(measure["ctrlName"])
            logger.debug(ctrlName)
            if ctrlName == "Gateway":
                # Send to gateway with v1/devices/me/telemetry.
                if measure["health"] == 1:
                    if "_spt" in measure["name"]:
                        attributes_payload[measure["name"]] = measure["value"]
                    else:
                        payload["values"][measure["name"]] = measure["value"]
            else:
                name = measure['name']
                value = measure['value']
                health = measure['health']
                # Create the controller buckets on first sight.
                if ctrlName not in grouped_data:
                    grouped_data[ctrlName] = {}
                if ctrlName not in grouped_attributes:
                    grouped_attributes[ctrlName] = {}
                grouped_attributes[ctrlName]["latestReportTime"] = now
                # Only record the datapoint if its health flag is good.
                if health:
                    if "_spt" in name:
                        grouped_attributes[ctrlName][name] = value
                    else:
                        grouped_data[ctrlName][name] = value
        except Exception as e:
            # One malformed measure must not abort the whole batch.
            logger.error(e)

    # Transform the grouped telemetry into the gateway-device structure.
    payload_devices = {}
    for key, value in grouped_data.items():
        if value:
            payload_devices[key] = [{"ts": now, "values": value}]

    attributes_payload_devices = {}
    for key, value in grouped_attributes.items():
        if value:
            attributes_payload_devices[key] = value

    # Send data belonging to the Gateway itself.
    for chunk in chunk_payload(payload=payload):
        publish(__topic__, json.dumps(chunk), __qos__)
        time.sleep(2)

    attributes_payload["latestReportTime"] = (
        round(dt.timestamp(dt.now()) / 600) * 600) * 1000
    for chunk in chunk_payload(payload=attributes_payload):
        publish("v1/devices/me/attributes", json.dumps(chunk), __qos__)
        time.sleep(2)

    # Send the gateway devices' telemetry.
    for chunk in chunk_payload_devices(payload=payload_devices):
        publish("v1/gateway/telemetry", json.dumps(chunk), __qos__)
        time.sleep(2)

    # Send the gateway devices' attributes, one chunk per publish.
    for chunk in chunk_payload_devices(payload=attributes_payload_devices, is_attributes_payload=True):
        publish("v1/gateway/attributes", json.dumps(chunk), __qos__)
        time.sleep(2)
|
||||
476
Pub_Sub/fk_leak_detection/thingsboard/pub/sendData.py
Normal file
476
Pub_Sub/fk_leak_detection/thingsboard/pub/sendData.py
Normal file
@@ -0,0 +1,476 @@
|
||||
import json, os, time, shutil, math
|
||||
from datetime import datetime as dt
|
||||
from common.Logger import logger
|
||||
from quickfaas.remotebus import publish
|
||||
from quickfaas.global_dict import get as get_params
|
||||
from quickfaas.global_dict import _set_global_args
|
||||
from mobiuspi_lib.gps import GPS
|
||||
|
||||
class RuntimeStats:
|
||||
def __init__(self):
|
||||
self.runs = {}
|
||||
self.currentRun = 0
|
||||
self.today = ""
|
||||
self.todayString = ""
|
||||
|
||||
def manageTime(self):
|
||||
if self.todayString != dt.strftime(dt.today(), "%Y-%m-%d"):
|
||||
if self.runs[self.todayString]["run_" + str(self.currentRun)]["start"] and not self.runs[self.todayString]["run_" + str(self.currentRun)]["end"]:
|
||||
self.runs[self.todayString]["run_" + str(self.currentRun)]["end"] = time.mktime(dt.strptime(self.todayString + " 23:59:59", "%Y-%m-%d %H:%M:%S").timetuple())
|
||||
self.addDay()
|
||||
self.today = dt.today()
|
||||
self.todayString = dt.strftime(self.today, "%Y-%m-%d")
|
||||
days = list(self.runs.keys())
|
||||
days.sort()
|
||||
while (dt.strptime(days[-1],"%Y-%m-%d") - dt.strptime(days[0], "%Y-%m-%d")).days > 40:
|
||||
self.removeDay(day=days[0])
|
||||
days = list(self.runs.keys())
|
||||
days.sort()
|
||||
|
||||
def addHertzDataPoint(self, frequency):
|
||||
if frequency > 0:
|
||||
self.manageTime()
|
||||
try:
|
||||
self.runs[self.todayString]["run_" + str(self.currentRun)]["frequencies"].append(frequency)
|
||||
except:
|
||||
self.runs[self.todayString]["run_" + str(self.currentRun)]["frequencies"] = [frequency]
|
||||
|
||||
def startRun(self):
|
||||
if self.checkRunning():
|
||||
self.endRun()
|
||||
self.runs[self.todayString]["run_" + str(self.currentRun)]["start"] = time.time()
|
||||
|
||||
def endRun(self):
|
||||
self.runs[self.todayString]["run_" + str(self.currentRun)]["end"] = time.time()
|
||||
self.currentRun += 1
|
||||
self.runs[self.todayString]["run_" + str(self.currentRun)] = {"start":0, "end": 0, "frequencies":[]}
|
||||
|
||||
def checkRunning(self):
|
||||
if self.runs[self.todayString]["run_" + str(self.currentRun)]["start"] and not self.runs[self.todayString]["run_" + str(self.currentRun)]["end"]:
|
||||
return True
|
||||
return False
|
||||
|
||||
def addDay(self):
|
||||
self.today = dt.today()
|
||||
self.todayString = dt.strftime(self.today, "%Y-%m-%d")
|
||||
self.currentRun = 1
|
||||
self.runs[self.todayString] = {}
|
||||
self.runs[self.todayString]["run_" + str(self.currentRun)] = {"start":0, "end": 0, "frequencies":[]}
|
||||
|
||||
def countRunsDay(self, day=None):
|
||||
if not day:
|
||||
day = self.todayString
|
||||
return len(self.runs[day].keys())
|
||||
|
||||
def countRunsMultiDay(self, numDays=30):
|
||||
total_runs = 0
|
||||
for day in list(self.runs.keys()):
|
||||
total_runs += self.countRunsDay(day=day)
|
||||
return total_runs
|
||||
|
||||
def calculateAverageHertzDay(self, day=None, returnArray=False):
|
||||
dayFrequencies = []
|
||||
if not day:
|
||||
day = self.todayString
|
||||
for run in list(self.runs[day].keys()):
|
||||
try:
|
||||
dayFrequencies += self.runs[day][run]["frequencies"]
|
||||
except Exception as e:
|
||||
logger.debug("{} missing frequency data for {}".format(day,run))
|
||||
if returnArray:
|
||||
return dayFrequencies
|
||||
return round(math.fsum(dayFrequencies)/len(dayFrequencies),2)
|
||||
|
||||
def calculateAverageHertzMultiDay(self, numDays=30):
|
||||
self.manageTime()
|
||||
frequencies = []
|
||||
for day in list(self.runs.keys()):
|
||||
if not day == self.todayString and (dt.strptime(self.todayString, "%Y-%m-%d") - dt.strptime(day, "%Y-%m-%d")).days <= numDays:
|
||||
try:
|
||||
frequencies += self.calculateAverageHertzDay(day=day, returnArray=True)
|
||||
except Exception as e:
|
||||
logger.debug("{} missing frequency data".format(day))
|
||||
if len(frequencies):
|
||||
return round(math.fsum(frequencies)/len(frequencies), 2)
|
||||
return 0
|
||||
|
||||
def calculateRunTimeDay(self, day=None, convertToHours=True):
|
||||
self.manageTime()
|
||||
total_time = 0
|
||||
if not day:
|
||||
day = self.todayString
|
||||
for run in list(self.runs.get(day,{}).keys()):
|
||||
if self.runs[day][run]["end"] == 0 and self.runs[day][run]["start"] != 0:
|
||||
total_time = time.time() - self.runs[day][run]["start"] + total_time
|
||||
else:
|
||||
total_time = self.runs[day][run]["end"] - self.runs[day][run]["start"] + total_time
|
||||
if convertToHours:
|
||||
return self.convertSecondstoHours(total_time)
|
||||
return total_time
|
||||
|
||||
def calculateRunTimeMultiDay(self, numDays=30, convertToHours=True):
|
||||
total_time = 0
|
||||
for day in list(self.runs.keys()):
|
||||
if (dt.strptime(self.todayString, "%Y-%m-%d") - dt.strptime(day, "%Y-%m-%d")).days <= numDays:
|
||||
total_time += self.calculateRunTimeDay(day=day, convertToHours=False)
|
||||
if convertToHours:
|
||||
return self.convertSecondstoHours(total_time)
|
||||
return total_time
|
||||
|
||||
def calculateRunPercentDay(self, day=None, precise=False):
|
||||
if not day:
|
||||
day = self.todayString
|
||||
if precise:
|
||||
return (self.calculateRunTimeDay(day=day)/24) * 100
|
||||
return round((self.calculateRunTimeDay(day=day)/24) * 100, 2)
|
||||
|
||||
|
||||
def calculateRunPercentMultiDay(self, numDays=30, precise=False):
    """Percentage of the available hours in the last *numDays* days the pump ran.

    Args:
        numDays: size of the rolling window, in days.
        precise: when True return the raw float, otherwise round to 2 places.
    """
    self.manageTime()
    # Bug fix: numDays was previously not forwarded to
    # calculateRunTimeMultiDay(), so the numerator always covered the default
    # 30-day window while the denominator used the caller's numDays.
    percent = (self.calculateRunTimeMultiDay(numDays=numDays) / (24 * numDays)) * 100
    return percent if precise else round(percent, 2)
||||
def removeDay(self, day=None):
    """Drop one day's run records from the history and persist the change.

    Raises:
        Exception: when *day* is not supplied.
    """
    if not day:
        raise Exception("Day can not be None")
    logger.debug("removing day {}".format(day))
    self.runs.pop(day)
    self.saveDataToFile()
||||
def convertSecondstoHours(self, seconds):
    """Convert a duration in seconds to hours, rounded to two decimals."""
    hours = seconds / 3600
    return round(hours, 2)
||||
def resetData(self):
    """Wipe all stored run data and reinitialise today's bookkeeping.

    Returns:
        bool: True when every day was removed, False on the first failure.
    """
    logger.debug("clearing database")
    for day in list(self.runs.keys()):
        try:
            self.removeDay(day=day)
        except Exception as e:
            logger.error(e)
            return False
    # Reset in-memory state and re-create today's entry.
    self.runs = {}
    self.currentRun = 0
    self.today = ""
    self.todayString = ""
    self.manageTime()
    return True
||||
def loadDataFromFile(self, filePath="/var/user/files/runtimestats.json"):
    """Load persisted run statistics from *filePath* into this instance.

    On any load failure (missing file, unreadable JSON, missing keys) the
    stats are reinitialised via addDay() and a fresh file is written so the
    next load succeeds.

    Args:
        filePath: JSON file holding {"current_run", "current_day", "data"}.
    """
    try:
        with open(filePath, "r") as f:
            temp = json.load(f)
        self.runs = temp["data"]
        self.currentRun = temp["current_run"]
        self.today = dt.strptime(temp["current_day"], "%Y-%m-%d")
        self.todayString = temp["current_day"]
        self.manageTime()
    except Exception as e:
        # Was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt and hid the real cause of the failure.
        logger.debug("Could not load data from {}: {}".format(filePath, e))
        logger.debug("creating file")
        self.addDay()
        try:
            with open(filePath, "w") as f:
                d = {
                    "current_run": self.currentRun,
                    "current_day": self.todayString,
                    "data": self.runs
                }
                json.dump(d, f, indent=4)
        except Exception as e:
            logger.error(e)
||||
def saveDataToFile(self, filePath="/var/user/files/runtimestats.json"):
    """Persist the current run statistics to *filePath* as indented JSON.

    Failures are logged and swallowed (best-effort persistence).
    """
    try:
        logger.debug("Saving Runs")
        snapshot = {
            "current_run": self.currentRun,
            "current_day": self.todayString,
            "data": self.runs
        }
        with open(filePath, "w") as f:
            json.dump(snapshot, f, indent=4)
    except Exception as e:
        logger.error(e)
||||
# Module-level run-time-stats singleton used by sendData() below.
# Load any persisted history at import time, then immediately write it back
# so the backing JSON file exists from first boot onward.
rts = RuntimeStats()
rts.loadDataFromFile()
rts.saveDataToFile()
||||
def reboot():
    """Restart the device-supervisor process (and with it this app).

    Sends SIGHUP to the pid recorded in /var/run/python/supervisord.pid and
    logs the shell output.
    """
    logger.info("!" * 10 + "REBOOTING DEVICE" + "!" * 10)
    # NOTE(review): relies on a POSIX shell and on the pid file existing on
    # the target device -- confirm before deploying elsewhere.
    r = os.popen("kill -s SIGHUP `cat /var/run/python/supervisord.pid`").read()
    logger.info(f"REBOOT : {r}")
||||
def checkFileExist(filename):
    """Ensure /var/user/files/<filename> exists, seeding it with an empty
    JSON object ({}) when missing. Creates the folder first if needed.

    Args:
        filename: file name (not a path) to create under /var/user/files.
    """
    path = "/var/user/files"
    filepath = os.path.join(path, filename)
    if not os.path.exists(path):
        logger.info("no folder making files folder in var/user")
        os.makedirs(path)
    if not os.path.exists(filepath):
        logger.info("no creds file making creds file")
        # "w" instead of the original "a": the file does not exist here, and
        # write mode can never append a second JSON object to an existing one.
        with open(filepath, "w") as f:
            json.dump({}, f)
||||
def convertDStoJSON(ds):
    """Flatten a list of {"key": k, "value": v} records into a plain dict."""
    return {entry["key"]: entry["value"] for entry in ds}
||||
def convertJSONtoDS(j):
    """Expand a plain dict into a list of {"key": k, "value": v} records."""
    return [{"key": key, "value": value} for key, value in j.items()]
||||
def checkCredentialConfig():
    """Keep MQTT cloud credentials in sync between the device-supervisor
    config and a persistent backup at /var/user/files/creds.json.

    If the config still holds placeholder credentials ("unknown" or empty
    password), restore them from the backup, reconcile parameters, rewrite
    the config, and reboot so the supervisor reloads it. Otherwise treat the
    config as authoritative and refresh the backup file from it.
    """
    logger.info("CHECKING CONFIG")
    cfgpath = "/var/user/cfg/device_supervisor/device_supervisor.cfg"
    credspath = "/var/user/files/creds.json"
    cfg = dict()
    with open(cfgpath, "r") as f:
        cfg = json.load(f)
    clouds = cfg.get("clouds")
    logger.info(clouds)
    # if not configured then try to configure from stored values
    if clouds[0]["args"]["clientId"] == "unknown" or clouds[0]["args"]["username"] == "unknown" or not clouds[0]["args"]["passwd"] or clouds[0]["args"]["passwd"] == "unknown":
        checkFileExist("creds.json")
        with open(credspath, "r") as c:
            creds = json.load(c)
            if creds:
                logger.info("updating config with stored data")
                # Restore the three credential fields from the backup file.
                clouds[0]["args"]["clientId"] = creds["clientId"]
                clouds[0]["args"]["username"] = creds["userName"]
                clouds[0]["args"]["passwd"] = creds["password"]
                cfg["clouds"] = clouds
                cfg = checkParameterConfig(cfg)
                with open(cfgpath, "w", encoding='utf-8') as n:
                    json.dump(cfg, n, indent=1, ensure_ascii=False)
                # Reboot so the supervisor reloads the repaired config.
                reboot()
    else:
        # assuming clouds is filled out, if data is different then assume someone typed in something new and store it, if creds is empty fill with clouds' data
        checkFileExist("creds.json")
        with open(credspath, "r") as c:
            logger.info("updating stored file with new data")
            # NOTE(review): the config is reconciled and rewritten BEFORE the
            # backup is read from the already-open handle `c`; the statement
            # order here is load-bearing.
            cfg = checkParameterConfig(cfg)
            with open(cfgpath, "w", encoding='utf-8') as n:
                json.dump(cfg, n, indent=1, ensure_ascii=False)
            creds = json.load(c)
            if creds:
                # Copy over only the fields that differ from the live config.
                if creds["clientId"] != clouds[0]["args"]["clientId"]:
                    creds["clientId"] = clouds[0]["args"]["clientId"]
                if creds["userName"] != clouds[0]["args"]["username"]:
                    creds["userName"] = clouds[0]["args"]["username"]
                if creds["password"] != clouds[0]["args"]["passwd"]:
                    creds["password"] = clouds[0]["args"]["passwd"]
            else:
                # Backup file was empty -- seed it from the live config.
                creds["clientId"] = clouds[0]["args"]["clientId"]
                creds["userName"] = clouds[0]["args"]["username"]
                creds["password"] = clouds[0]["args"]["passwd"]
            with open(credspath, "w") as cw:
                json.dump(creds, cw)
||||
def checkParameterConfig(cfg):
    """Reconcile the supervisor config's "labels" parameters with the
    persisted store at /var/user/files/params.json.

    Returns the (possibly updated) cfg dict so the caller can write it back.
    """
    logger.info("Checking Parameters!!!!")
    paramspath = "/var/user/files/params.json"
    cfgparams = convertDStoJSON(cfg.get("labels"))
    # check stored values
    checkFileExist("params.json")
    with open(paramspath, "r") as f:
        logger.info("Opened param storage file")
        params = json.load(f)
        if params:
            if cfgparams != params:
                # go through each param
                # if not "unknown" and cfg and params aren't the same take from cfg likely updated manually
                # if key in cfg but not in params copy to params
                logger.info("equalizing params between cfg and stored")
                for key in cfgparams.keys():
                    try:
                        if cfgparams[key] != params[key] and cfgparams[key] != "unknown":
                            params[key] = cfgparams[key]
                    except:
                        # key is present in cfg but missing from the stored
                        # params -- copy it over
                        params[key] = cfgparams[key]
                cfg["labels"] = convertJSONtoDS(params)
                _set_global_args(convertJSONtoDS(params))
                with open(paramspath, "w") as p:
                    json.dump(params, p)
        else:
            # Stored file is empty -- initialise it from in-memory params.
            with open(paramspath, "w") as p:
                logger.info("initializing param file with params in memory")
                json.dump(convertDStoJSON(get_params()), p)
                cfg["labels"] = get_params()

    return cfg
||||
def getGPS():
    """Read the modem's GPS fix and return (lat, lon, speed_mph) as strings.

    The modem reports latitude/longitude as degrees, decimal minutes and a
    hemisphere letter separated by spaces; these are converted to signed
    decimal degrees. Speed is converted from knots to miles per hour.
    """
    gps = GPS()

    # Retrieve GPS information from the modem.
    status = gps.get_position_status()
    logger.debug("position_status: ")
    logger.debug(status)

    lat_parts = status["latitude"].split(" ")
    lon_parts = status["longitude"].split(" ")

    # Strip the trailing unit character from degrees and minutes, then fold
    # minutes into decimal degrees.
    lat_dec = int(lat_parts[0][:-1]) + (float(lat_parts[1][:-1]) / 60)
    lon_dec = int(lon_parts[0][:-1]) + (float(lon_parts[1][:-1]) / 60)

    # Southern and western hemispheres are negative.
    if lat_parts[2] == "S":
        lat_dec = -lat_dec
    if lon_parts[2] == "W":
        lon_dec = -lon_dec

    logger.info("HERE IS THE GPS COORDS")
    logger.info(f"LATITUDE: {lat_dec}, LONGITUDE: {lon_dec}")

    speed_parts = status["speed"].split(" ")
    speed_mph = float(speed_parts[0]) * 1.151

    return (f"{lat_dec:.8f}", f"{lon_dec:.8f}", f"{speed_mph:.2f}")
||||
def chunk_payload(payload, chunk_size=20):
    """Yield payload pieces holding at most *chunk_size* values each.

    Handles two payload shapes:
      * telemetry {"ts": ..., "values": {...}} -- each chunk keeps the ts;
      * flat attribute dicts {"k": "v", ...}   -- each chunk is a plain dict.
    """
    if "values" in payload:
        # Original format: {"ts": ..., "values": {...}}
        ts = payload["ts"]
        items = list(payload["values"].items())
        for start in range(0, len(items), chunk_size):
            yield {"ts": ts, "values": dict(items[start:start + chunk_size])}
    else:
        # New format: {"key1": "value1", "key2": "value2"}
        keys = list(payload.keys())
        for start in range(0, len(keys), chunk_size):
            yield {k: payload[k] for k in keys[start:start + chunk_size]}
||||
def chunk_payload_devices(payload, chunk_size=20, is_attributes_payload=False):
    """Chunk a gateway (multi-device) payload for publishing.

    Attributes payloads are split across controllers, *chunk_size*
    controllers per chunk. Telemetry payloads are split within each
    controller entry, *chunk_size* values per chunk, preserving the entry's
    timestamp.
    """
    if is_attributes_payload:
        # Chunk whole controllers.
        controllers = list(payload.items())
        for start in range(0, len(controllers), chunk_size):
            yield dict(controllers[start:start + chunk_size])
        return
    # Chunk the values within each controller's entries.
    for controller, entries in payload.items():
        for entry in entries:
            ts = entry['ts']
            items = list(entry['values'].items())
            for start in range(0, len(items), chunk_size):
                yield {
                    controller: [{
                        "ts": ts,
                        "values": dict(items[start:start + chunk_size])
                    }]
                }
||||
def controlName(name):
    """Map a controller tag name to its configured display name.

    Names without a configured mapping fall through to "Gateway".
    """
    logger.debug(name)
    params = convertDStoJSON(get_params())
    logger.debug(params)
    # Built eagerly so missing configuration keys fail loudly here.
    mapping = {
        "overflow_pump": f"{params['overflow_pump']}",
        "leak_detection": f"{params['leak_detection']}",
    }
    return mapping.get(name, "Gateway")
||||
def sendData(message):
    """Publish a supervisor measurement message to ThingsBoard over MQTT.

    Splits the incoming measures into four payloads -- gateway telemetry,
    gateway attributes (tag names containing "_spt"), per-device telemetry
    and per-device attributes -- and publishes each one in chunks, sleeping
    between publishes to pace the broker.

    Side effects: updates the module-level run-time stats (rts) when the
    "p001_run" tag is seen, persisting start/stop transitions to disk.

    Args:
        message: dict with a "measures" list of
                 {"ctrlName", "name", "value", "health"} entries.
    """
    rts.loadDataFromFile()
    grouped_data = {}
    grouped_attributes = {}
    # Timestamp rounded to the nearest 10 minutes, in milliseconds.
    now = (round(dt.timestamp(dt.now()) / 600) * 600) * 1000
    payload = {"ts": now, "values": {}}
    attributes_payload = {}
    logger.debug(message)

    for measure in message["measures"]:
        try:
            logger.debug(measure)
            ctrlName = controlName(measure["ctrlName"])
            logger.debug(ctrlName)
            if ctrlName == "Gateway":
                # send to gateway with v1/devices/me/telemetry
                if measure["health"] == 1:
                    if "_spt" in measure["name"]:
                        attributes_payload[measure["name"]] = measure["value"]
                    else:
                        payload["values"][measure["name"]] = measure["value"]
            else:
                name = measure['name']
                value = measure['value']
                health = measure['health']
                # Add controller for telemetry if it doesn't exist
                if ctrlName not in grouped_data:
                    grouped_data[ctrlName] = {}
                # Add controller for attributes if it doesn't exist
                if ctrlName not in grouped_attributes:
                    grouped_attributes[ctrlName] = {}
                grouped_attributes[ctrlName]["latestReportTime"] = now
                # Add data to temp payload if datapoint health is good
                if health:
                    if "_spt" in name:
                        grouped_attributes[ctrlName][name] = value
                    if name in ["p001_run"]:
                        # Track pump start/stop transitions for run-time stats.
                        rts.manageTime()
                        current = rts.runs[rts.todayString]["run_" + str(rts.currentRun)]
                        if value == 1 and not current["start"]:
                            rts.startRun()
                            rts.saveDataToFile()
                        elif value == 0 and current["start"] and not current["end"]:
                            rts.endRun()
                            rts.saveDataToFile()
                        grouped_data[ctrlName][name + "_today_running_hours"] = rts.calculateRunTimeDay()
                        grouped_data[ctrlName][name + "_month_running_hours"] = rts.calculateRunTimeMultiDay(numDays=dt.today().day)
                    else:
                        grouped_data[ctrlName][name] = value
        except Exception as e:
            logger.error(e)

    # Transform the grouped data to desired structure
    payload_devices = {}
    for key, value in grouped_data.items():
        if value:
            payload_devices[key] = [{"ts": now, "values": value}]

    attributes_payload_devices = {}
    for key, value in grouped_attributes.items():
        if value:
            attributes_payload_devices[key] = value

    # Send data belonging to Gateway
    for chunk in chunk_payload(payload=payload):
        publish(__topic__, json.dumps(chunk), __qos__)
        time.sleep(2)

    attributes_payload["latestReportTime"] = (
        round(dt.timestamp(dt.now()) / 600) * 600) * 1000
    for chunk in chunk_payload(payload=attributes_payload):
        publish("v1/devices/me/attributes", json.dumps(chunk), __qos__)
        time.sleep(2)

    # Send gateway devices data
    for chunk in chunk_payload_devices(payload=payload_devices):
        publish("v1/gateway/telemetry", json.dumps(chunk), __qos__)
        time.sleep(2)

    for chunk in chunk_payload_devices(payload=attributes_payload_devices, is_attributes_payload=True):
        # Bug fix: this loop previously published the entire
        # attributes_payload_devices dict on every iteration instead of the
        # current chunk, defeating the chunking and sending duplicate
        # oversized payloads.
        publish("v1/gateway/attributes", json.dumps(chunk), __qos__)
        time.sleep(2)
2392
Pub_Sub/fk_leak_detection/thingsboard/tag_dump_2025_October_10.json
Normal file
2392
Pub_Sub/fk_leak_detection/thingsboard/tag_dump_2025_October_10.json
Normal file
File diff suppressed because it is too large
Load Diff
2348
Pub_Sub/fk_overflow_pump/thingsboard/tag_dump_2025_October_10.json
Normal file
2348
Pub_Sub/fk_overflow_pump/thingsboard/tag_dump_2025_October_10.json
Normal file
File diff suppressed because it is too large
Load Diff
2016
Pub_Sub/fk_plcpond/thingsboard/tag_dump_2025_October_10.json
Normal file
2016
Pub_Sub/fk_plcpond/thingsboard/tag_dump_2025_October_10.json
Normal file
File diff suppressed because it is too large
Load Diff
Reference in New Issue
Block a user