updated leak detection
This commit is contained in:
@@ -0,0 +1,109 @@
|
||||
MeasuringPointName,ControllerName,GroupName,UploadType,DeadZoneType,DeadZonePercent,DataType,ArrayIndex,EnableBit,BitIndex,reverseBit,Address,Decimal,Len,CodeType,ReadWrite,Unit,Description,Transform Type,MaxValue,MinValue,MaxScale,MinScale,Gain,Offset,startBit,endBit,Pt,Ct,Mapping_table,TransDecimal,bitMap,msecSample,storageLwTSDB,DataEndianReverse,ReadOffset,ReadLength,WriteOffset,WriteLength,DataParseMethod,BitId,pollCycle,EnableRequestCount,RequestCount,,
|
||||
fm_1_flowrate,leak_detection,fastReport,periodic,,,FLOAT,,,,,FIT1_FLOWRATE,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
fm_1_lolo_alm,leak_detection,fastReport,periodic,,,BIT,,0,0,0,FIT1_LL_ALARM,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
fm_1_lolo_reset_spt,leak_detection,fastReport,periodic,,,FLOAT,,,,,FIT1_LL_RESET_SPT,2,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
fm_1_lolo_spt,leak_detection,fastReport,periodic,,,FLOAT,,,,,FIT1_LL_SET_SPT,2,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
fm_1_total,leak_detection,fastReport,periodic,,,FLOAT,,,,,FIT1_TOTAL_FLOWRATE,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
fm_2_flowrate,leak_detection,fastReport,periodic,,,FLOAT,,,,,FIT2_FLOWRATE,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
fm_2_lolo_alm,leak_detection,fastReport,periodic,,,BIT,,0,0,0,FIT2_LL_ALARM,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
fm_2_lolo_reset_spt,leak_detection,fastReport,periodic,,,FLOAT,,,,,FIT2_LL_RESET_SPT,2,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
fm_2_lolo_spt,leak_detection,fastReport,periodic,,,FLOAT,,,,,FIT2_LL_SET_SPT,2,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
fm_2_total,leak_detection,fastReport,periodic,,,FLOAT,,,,,FIT2_TOTAL_FLOWRATE,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_0ft_volume_total,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_1_0FT_Volume_Total,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_10ft_volume_total,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_1_10FT_Volume_Total,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_11ft_volume_total,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_1_11FT_Volume_Total,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_12ft_volume_total,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_1_12FT_Volume_Total,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_13ft_volume_total,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_1_13FT_Volume_Total,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_14ft_volume_total,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_1_14FT_Volume_Total,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_15ft_volume_total,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_1_15FT_Volume_Total,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_16ft_volume_total,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_1_16FT_Volume_Total,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_1ft_volume_total,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_1_1FT_Volume_Total,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_2ft_volume_total,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_1_2FT_Volume_Total,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_3ft_volume_total,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_1_3FT_Volume_Total,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_4ft_volume_total,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_1_4FT_Volume_Total,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_5ft_volume_total,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_1_5FT_Volume_Total,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_6ft_volume_total,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_1_6FT_Volume_Total,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_7ft_volume_total,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_1_7FT_Volume_Total,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_8ft_volume_total,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_1_8FT_Volume_Total,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_9ft_volume_total,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_1_9FT_Volume_Total,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_cu_ft,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_1_CU_Ft,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_cubic_feet_to_barrels,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_1_Cubic_Feet_To_Barrels,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_hi_alm,leak_detection,fastReport,periodic,,,BIT,,0,0,0,Leak_1_Hi_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_hi_alm_enable,leak_detection,fastReport,periodic,,,BIT,,0,0,0,Leak_1_Hi_Alarm_Enable,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_hi_alm_enabled,leak_detection,fastReport,periodic,,,BIT,,0,0,0,Leak_1_Hi_Alarm_Enabled,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_hi_clr_spt,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_1_Hi_Clr_Setpoint,2,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_hi_reset,leak_detection,fastReport,periodic,,,BIT,,0,0,0,Leak_1_Hi_Reset,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_hi_set,leak_detection,fastReport,periodic,,,BIT,,0,0,0,Leak_1_Hi_Set,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_hi_spt,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_1_Hi_Setpoint,2,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_level,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_1_Lev,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_level_psi,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_1_Lev_Psi,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_lo_alm,leak_detection,fastReport,periodic,,,BIT,,0,0,0,Leak_1_Lo_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_lo_clr_spt,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_1_Lo_Clr_Setpoint,2,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_lo_reset,leak_detection,fastReport,periodic,,,BIT,,0,0,0,Leak_1_Lo_Reset,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_lo_set,leak_detection,fastReport,periodic,,,BIT,,0,0,0,Leak_1_Lo_Set,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_lo_spt,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_1_Lo_Setpoint,2,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_pump_off_spt,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_1_Pump_Off_Spt,2,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_pump_on_spt,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_1_Pump_On_Spt,2,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_raw_max,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_1_Raw_Max,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_raw_min,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_1_Raw_Min,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_rise_multiplier,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_1_Rise_Multiplier,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_scaled_max,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_1_ScaledMax,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_scaled_min,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_1_ScaledMin,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_stage_total,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_1_Stage_Total,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_1_total_barrels,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_1_Total_Barrels,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_2_0ft_volume_total,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_2_0FT_Volume_Total,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_2_10ft_volume_total,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_2_10FT_Volume_Total,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_2_11ft_volume_total,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_2_11FT_Volume_Total,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_2_12ft_volume_total,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_2_12FT_Volume_Total,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_2_13ft_volume_total,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_2_13FT_Volume_Total,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_2_14ft_volume_total,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_2_14FT_Volume_Total,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_2_15ft_volume_total,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_2_15FT_Volume_Total,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_2_16ft_volume_total,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_2_16FT_Volume_Total,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_2_1ft_volume_total,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_2_1FT_Volume_Total,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_2_2ft_volume_total,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_2_2FT_Volume_Total,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_2_3ft_volume_total,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_2_3FT_Volume_Total,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_2_4ft_volume_total,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_2_4FT_Volume_Total,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_2_5ft_volume_total,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_2_5FT_Volume_Total,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_2_6ft_volume_total,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_2_6FT_Volume_Total,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_2_7ft_volume_total,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_2_7FT_Volume_Total,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_2_8ft_volume_total,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_2_8FT_Volume_Total,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_2_9ft_volume_total,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_2_9FT_Volume_Total,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_2_cu_ft,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_2_CU_Ft,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_2_cubic_feet_to_barrels,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_2_Cubic_Feet_To_Barrels,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_2_hi_alm,leak_detection,fastReport,periodic,,,BIT,,0,0,0,Leak_2_Hi_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_2_hi_clr_spt,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_2_Hi_Clr_Setpoint,2,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_2_hi_reset,leak_detection,fastReport,periodic,,,BIT,,0,0,0,Leak_2_Hi_Reset,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_2_hi_set,leak_detection,fastReport,periodic,,,BIT,,0,0,0,Leak_2_Hi_Set,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_2_hi_spt,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_2_Hi_Setpoint,2,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_2_level,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_2_Lev,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_2_level_psi,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_2_Lev_Psi,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_2_lo_alm,leak_detection,fastReport,periodic,,,BIT,,0,0,0,Leak_2_Lo_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_2_lo_clr_spt,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_2_Lo_Clr_Setpoint,2,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_2_lo_reset,leak_detection,fastReport,periodic,,,BIT,,0,0,0,Leak_2_Lo_Reset,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_2_lo_set,leak_detection,fastReport,periodic,,,BIT,,0,0,0,Leak_2_Lo_Set,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_2_lo_spt,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_2_Lo_Setpoint,2,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_2_pump_off_spt,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_2_Pump_Off_Spt,2,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_2_pump_on_spt,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_2_Pump_On_Spt,2,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_2_raw_max,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_2_Raw_Max,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_2_raw_min,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_2_Raw_Min,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_2_rise_multiplier,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_2_Rise_Multiplier,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_2_scaled_max,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_2_ScaledMax,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_2_scaled_min,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_2_ScaledMin,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
leak_2_total_barrels,leak_detection,fastReport,periodic,,,FLOAT,,,,,Leak_2_Total_Barrels,2,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
p001_auto,leak_detection,fastReport,periodic,,,BIT,,0,0,0,P001_AUTO_FBK,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
p001_hand,leak_detection,fastReport,periodic,,,BIT,,0,0,0,P001_HAND_FBK,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
p001_overload,leak_detection,fastReport,periodic,,,BIT,,0,0,0,P001_OVERLOAD_FBK,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
p001_overload_alm,leak_detection,fastReport,periodic,,,BIT,,0,0,0,P001_OVERLOAD_ALARM,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
p001_run,leak_detection,fastReport,periodic,,,BIT,,0,0,0,P001_RUN_FBK,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
p001_run_cmd,leak_detection,fastReport,periodic,,,BIT,,0,0,0,P001_RUN_CMD,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
p001_run_fail_alm,leak_detection,fastReport,periodic,,,BIT,,0,0,0,P001_RUN_FAIL_ALARM,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
p001_start,leak_detection,fastReport,periodic,,,BIT,,0,0,0,P001_START_BTN_FBK,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
p002_auto,leak_detection,fastReport,periodic,,,BIT,,0,0,0,P002_AUTO_FBK,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
p002_hand,leak_detection,fastReport,periodic,,,BIT,,0,0,0,P002_HAND_FBK,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
p002_overload,leak_detection,fastReport,periodic,,,BIT,,0,0,0,P002_OVERLOAD_FBK,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
p002_overload_alm,leak_detection,fastReport,periodic,,,BIT,,0,0,0,P002_OVERLOAD_ALARM,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
p002_run,leak_detection,fastReport,periodic,,,BIT,,0,0,0,P002_RUN_FBK,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
p002_run_cmd,leak_detection,fastReport,periodic,,,BIT,,0,0,0,P002_RUN_CMD,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
p002_run_fail_alm,leak_detection,fastReport,periodic,,,BIT,,0,0,0,P002_RUN_FAIL_ALARM,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
p002_start,leak_detection,fastReport,periodic,,,BIT,,0,0,0,P002_START_BTN_FBK,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
reset,leak_detection,fastReport,periodic,,,BIT,,0,0,0,RESET,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,,,,
|
||||
|
2392
Pub_Sub/fk_leak_detection/thingsboard/tag_dump_2025_December_09.json
Normal file
2392
Pub_Sub/fk_leak_detection/thingsboard/tag_dump_2025_December_09.json
Normal file
File diff suppressed because it is too large
Load Diff
2392
Pub_Sub/fk_leak_detection/thingsboard/tag_dump_2025_December_10.json
Normal file
2392
Pub_Sub/fk_leak_detection/thingsboard/tag_dump_2025_December_10.json
Normal file
File diff suppressed because it is too large
Load Diff
6
Pub_Sub/fk_overflow_pump/thingsboard/alarm.csv
Normal file
6
Pub_Sub/fk_overflow_pump/thingsboard/alarm.csv
Normal file
@@ -0,0 +1,6 @@
|
||||
AlarmName,ControllerName,MeasuringPointName,AlarmLevel,Condition1,Operand1,CombineMethod,Condition2,Operand2,AlarmContent,AlarmTag
|
||||
air_comp_low_alm,plcpond,air_comp_low_alm,5,eq,1,none,eq,,Alarm Triggered,default
|
||||
pond_1_hi_alm,overflow_pump,pond_1_hi_alm,5,eq,1,none,eq,,Pond 1 Hi,gateway
|
||||
pond_1_lo_alm,overflow_pump,pond_1_lo_alm,5,eq,1,none,eq,,Pond 1 Lo,gateway
|
||||
pump_1_run_fail_alm,overflow_pump,pump_1_run_fail_alm,5,eq,1,none,eq,,Alarm Triggered,gateway
|
||||
pump_2_run_fail_alm,overflow_pump,pump_2_run_fail_alm,5,eq,1,none,eq,,Alarm Triggered,gateway
|
||||
|
14
Pub_Sub/fk_plcpond_gateway/thingsboard/alarm.csv
Normal file
14
Pub_Sub/fk_plcpond_gateway/thingsboard/alarm.csv
Normal file
@@ -0,0 +1,14 @@
|
||||
AlarmName,ControllerName,MeasuringPointName,AlarmLevel,Condition1,Operand1,CombineMethod,Condition2,Operand2,AlarmContent,AlarmTag
|
||||
air_comp_low_alm,plcpond,air_comp_low_alm,5,eq,1.0,none,eq,,Alarm Triggered,default
|
||||
pond_1_hi_alm,overflow_pump,pond_1_hi_alm,5,eq,1.0,none,eq,,Pond 1 Hi,gateway
|
||||
pond_1_lo_alm,overflow_pump,pond_1_lo_alm,5,eq,1.0,none,eq,,Pond 1 Lo,gateway
|
||||
pump_1_run_fail_alm,overflow_pump,pump_1_run_fail_alm,5,eq,1.0,none,eq,,Alarm Triggered,gateway
|
||||
pump_2_run_fail_alm,overflow_pump,pump_2_run_fail_alm,5,eq,1.0,none,eq,,Alarm Triggered,gateway
|
||||
leak_1_hi_alm,leak_detection,leak_1_hi_alm,5,eq,1,none,eq,,Alarm Triggered,gateway
|
||||
leak_1_lo_alm,leak_detection,leak_1_lo_alm,5,eq,1,none,eq,,Alarm Triggered,gateway
|
||||
leak_2_hi_alm,leak_detection,leak_2_hi_alm,5,eq,1,none,eq,,Alarm Triggered,gateway
|
||||
leak_2_lo_alm,leak_detection,leak_2_lo_alm,5,eq,1,none,eq,,Alarm Triggered,gateway
|
||||
p001_overload_alm,leak_detection,p001_overload_alm,5,eq,1,none,eq,,Alarm Triggered,gateway
|
||||
p001_run_fail_alm,leak_detection,p001_run_fail_alm,5,eq,1,none,eq,,Alarm Triggered,gateway
|
||||
p002_overload_alm,leak_detection,p002_overload_alm,5,eq,1,none,eq,,Alarm Triggered,gateway
|
||||
p002_run_fail_alm,leak_detection,p002_run_fail_alm,5,eq,1,none,eq,,Alarm Triggered,gateway
|
||||
|
2733
Pub_Sub/fk_plcpond_gateway/thingsboard/fk_plcpond_tb_v5.cfg
Normal file
2733
Pub_Sub/fk_plcpond_gateway/thingsboard/fk_plcpond_tb_v5.cfg
Normal file
File diff suppressed because one or more lines are too long
11
Pub_Sub/fk_plcpond_gateway/thingsboard/pub/sendAlarm.py
Normal file
11
Pub_Sub/fk_plcpond_gateway/thingsboard/pub/sendAlarm.py
Normal file
@@ -0,0 +1,11 @@
|
||||
import json, time
|
||||
from common.Logger import logger
|
||||
from quickfaas.remotebus import publish
|
||||
|
||||
|
||||
def sendAlarm(message):
    """Publish a single alarm value to the default cloud in ThingsBoard
    device-telemetry format: {"ts": <epoch ms>, "values": {<measure>: <value>}}.

    :param message: dict carrying at least "measureName" and "value"
        (schema assumed from usage here -- TODO confirm against the caller).
    """
    logger.info(message)
    payload = {}
    # BUG FIX: ThingsBoard expects the timestamp as an integer number of
    # milliseconds; time.time()*1000 is a float and serialises with a
    # fractional part (e.g. 1733790000123.456).
    payload["ts"] = int(time.time() * 1000)
    payload["values"] = {message["measureName"]: message["value"]}
    # __topic__ and __qos__ are injected by the QuickFaaS runtime.
    publish(__topic__, json.dumps(payload), __qos__, cloud_name="default")
|
||||
@@ -0,0 +1,28 @@
|
||||
# Enter your python code.
|
||||
import json, time
|
||||
from common.Logger import logger
|
||||
from quickfaas.remotebus import publish
|
||||
from quickfaas.global_dict import get as get_params
|
||||
|
||||
def convertDStoJSON(ds):
    """Flatten a device-supervisor list of {"key": ..., "value": ...}
    records into a plain dict (later duplicate keys win)."""
    return {record["key"]: record["value"] for record in ds}
|
||||
|
||||
def controlName(name):
    """Map an internal controller name to its operator-configured display
    name; any unknown controller falls back to "Gateway"."""
    logger.debug(name)
    params = convertDStoJSON(get_params())
    logger.debug(params)
    # The lookups are eager, so a missing parameter raises KeyError exactly
    # as before; the f-strings coerce the parameter values to str.
    lookup = {
        "overflow_pump": f"{params['overflow_pump']}",
        "leak_detection": f"{params['leak_detection']}",
    }
    return lookup.get(name, "Gateway")
|
||||
|
||||
def sendAlarmGateway(message, wizard_api):
    # Fan an aggregated alarm message out as one ThingsBoard gateway publish
    # per measuring point, keyed by the mapped controller display name.
    # NOTE(review): wizard_api is accepted for the platform callback
    # signature but is unused here.
    logger.info(message)
    for measure, data in message["values"].items():
        # Resolve the display name for the controller this measure belongs to.
        ctrlName = controlName(data["ctrlName"])
        # Gateway payload shape: {device: [{"ts": ms, "values": {...}}]}.
        # message["timestamp"] is presumably seconds -- multiplied to ms here.
        payload = {ctrlName: [{"ts": message["timestamp"]*1000, "values": {data["measureName"]: data["value"]}}]}
        publish(__topic__, json.dumps(payload), __qos__, cloud_name="default")
|
||||
733
Pub_Sub/fk_plcpond_gateway/thingsboard/pub/sendDataFast.py
Normal file
733
Pub_Sub/fk_plcpond_gateway/thingsboard/pub/sendDataFast.py
Normal file
@@ -0,0 +1,733 @@
|
||||
import json, os, time, shutil, math
|
||||
from datetime import datetime as dt
|
||||
from common.Logger import logger
|
||||
from quickfaas.remotebus import publish
|
||||
from quickfaas.global_dict import get as get_params
|
||||
from quickfaas.global_dict import _set_global_args
|
||||
from mobiuspi_lib.gps import GPS
|
||||
|
||||
class RuntimeStats:
    """Tracks equipment run periods and VFD frequency samples per calendar
    day, persisted to a JSON file.

    Data layout (self.runs):
        {"YYYY-MM-DD": {"run_N": {"start": <epoch s>, "end": <epoch s>,
                                  "frequencies": [<hz>, ...]}}}
    A run with start != 0 and end == 0 is currently in progress.
    """

    def __init__(self, filePath: str = "/var/user/files/runtimestats.json"):
        # BUG FIX: the original defined __init__ twice; Python silently kept
        # only the second definition, making the no-arg constructor dead
        # code.  Merged into one constructor with a default path so both
        # call styles work.
        self.runs = {}
        self.currentRun = 0
        self.today = ""
        self.todayString = ""
        self.filePath = filePath

    def manageTime(self):
        # Roll bookkeeping over when the calendar day changes: close any run
        # still open at 23:59:59 of the old day, create today's record, and
        # prune history older than 40 days.
        if self.todayString != dt.strftime(dt.today(), "%Y-%m-%d"):
            # Guard with .get(): on a fresh/reset instance todayString is ""
            # and has no entry yet (the original raised KeyError here).
            run = self.runs.get(self.todayString, {}).get("run_" + str(self.currentRun))
            if run and run["start"] and not run["end"]:
                run["end"] = time.mktime(dt.strptime(self.todayString + " 23:59:59", "%Y-%m-%d %H:%M:%S").timetuple())
            self.addDay()
            self.today = dt.today()
            self.todayString = dt.strftime(self.today, "%Y-%m-%d")
            days = sorted(self.runs.keys())
            while (dt.strptime(days[-1], "%Y-%m-%d") - dt.strptime(days[0], "%Y-%m-%d")).days > 40:
                self.removeDay(day=days[0])
                days = sorted(self.runs.keys())

    def addHertzDataPoint(self, frequency):
        # Record one frequency sample against the active run; zero/negative
        # readings mean "not running" and are dropped.
        if frequency > 0:
            self.manageTime()
            try:
                self.runs[self.todayString]["run_" + str(self.currentRun)]["frequencies"].append(frequency)
            except:
                # First sample for a run record that pre-dates the
                # "frequencies" key (older persisted files).
                self.runs[self.todayString]["run_" + str(self.currentRun)]["frequencies"] = [frequency]

    def startRun(self):
        # Begin a new run; if one is already open, close it first so runs
        # never overlap.
        if self.checkRunning():
            self.endRun()
        self.runs[self.todayString]["run_" + str(self.currentRun)]["start"] = time.time()

    def endRun(self):
        # Stamp the end time and pre-create the next (idle) run slot.
        self.runs[self.todayString]["run_" + str(self.currentRun)]["end"] = time.time()
        self.currentRun += 1
        self.runs[self.todayString]["run_" + str(self.currentRun)] = {"start": 0, "end": 0, "frequencies": []}

    def checkRunning(self):
        # A run is active when it has started but not yet ended.  Uses .get()
        # so an uninitialised instance reports False instead of KeyError.
        run = self.runs.get(self.todayString, {}).get("run_" + str(self.currentRun), {})
        if run.get("start") and not run.get("end"):
            return True
        return False

    def addDay(self):
        # Start a fresh record for today with a single idle run slot.
        self.today = dt.today()
        self.todayString = dt.strftime(self.today, "%Y-%m-%d")
        self.currentRun = 1
        self.runs[self.todayString] = {}
        self.runs[self.todayString]["run_" + str(self.currentRun)] = {"start": 0, "end": 0, "frequencies": []}

    def countRunsDay(self, day=None):
        # Number of run slots recorded for a day.  NOTE: includes the idle
        # placeholder slot created by endRun/addDay (original behaviour).
        if not day:
            day = self.todayString
        return len(self.runs.get(day, {}).keys())

    def countRunsMultiDay(self, numDays=30):
        # BUG FIX: the original ignored numDays and summed every stored day.
        # Now only days within the window (today inclusive) are counted.
        self.manageTime()
        total_runs = 0
        for day in list(self.runs.keys()):
            if (dt.strptime(self.todayString, "%Y-%m-%d") - dt.strptime(day, "%Y-%m-%d")).days <= numDays:
                total_runs += self.countRunsDay(day=day)
        return total_runs

    def calculateAverageHertzDay(self, day=None, returnArray=False):
        # Average of all frequency samples for one day, or the raw sample
        # list when returnArray is True.
        dayFrequencies = []
        if not day:
            day = self.todayString
        for run in list(self.runs.get(day, {}).keys()):
            try:
                dayFrequencies += self.runs[day][run]["frequencies"]
            except Exception:
                logger.debug("{} missing frequency data for {}".format(day, run))
        if returnArray:
            return dayFrequencies
        # BUG FIX: the original divided unconditionally and raised
        # ZeroDivisionError on a day with no samples; return 0 instead,
        # matching calculateAverageHertzMultiDay's empty-case behaviour.
        if not dayFrequencies:
            return 0
        return round(math.fsum(dayFrequencies) / len(dayFrequencies), 2)

    def calculateAverageHertzMultiDay(self, numDays=30):
        # Average over prior days within the window.  Today is excluded,
        # matching the original behaviour.
        self.manageTime()
        frequencies = []
        for day in list(self.runs.keys()):
            if not day == self.todayString and (dt.strptime(self.todayString, "%Y-%m-%d") - dt.strptime(day, "%Y-%m-%d")).days <= numDays:
                try:
                    frequencies += self.calculateAverageHertzDay(day=day, returnArray=True)
                except Exception:
                    logger.debug("{} missing frequency data".format(day))
        if len(frequencies):
            return round(math.fsum(frequencies) / len(frequencies), 2)
        return 0

    def calculateRunTimeDay(self, day=None, convertToHours=True):
        # Total run seconds for a day; an open run is measured up to "now".
        self.manageTime()
        total_time = 0
        if not day:
            day = self.todayString
        for run in list(self.runs.get(day, {}).keys()):
            if self.runs[day][run]["end"] == 0 and self.runs[day][run]["start"] != 0:
                total_time = time.time() - self.runs[day][run]["start"] + total_time
            else:
                total_time = self.runs[day][run]["end"] - self.runs[day][run]["start"] + total_time
        if convertToHours:
            return self.convertSecondstoHours(total_time)
        return total_time

    def calculateRunTimeMultiDay(self, numDays=30, convertToHours=True):
        # Sum run time over all stored days within the window (today
        # inclusive).
        total_time = 0
        for day in list(self.runs.keys()):
            if (dt.strptime(self.todayString, "%Y-%m-%d") - dt.strptime(day, "%Y-%m-%d")).days <= numDays:
                total_time += self.calculateRunTimeDay(day=day, convertToHours=False)
        if convertToHours:
            return self.convertSecondstoHours(total_time)
        return total_time

    def calculateRunPercentDay(self, day=None, precise=False):
        # Percentage of a 24-hour day spent running.
        if not day:
            day = self.todayString
        if precise:
            return (self.calculateRunTimeDay(day=day) / 24) * 100
        return round((self.calculateRunTimeDay(day=day) / 24) * 100, 2)

    def calculateRunPercentMultiDay(self, numDays=30, precise=False):
        # BUG FIX: the original called calculateRunTimeMultiDay() without
        # forwarding numDays, so the numerator always covered the default
        # 30-day window while the denominator used the requested one.
        self.manageTime()
        if precise:
            return (self.calculateRunTimeMultiDay(numDays=numDays) / (24 * numDays)) * 100
        return round((self.calculateRunTimeMultiDay(numDays=numDays) / (24 * numDays)) * 100, 2)

    def removeDay(self, day=None):
        # Delete one day's history and persist immediately.
        if not day:
            raise Exception("Day can not be None")
        logger.debug("removing day {}".format(day))
        del self.runs[day]
        # BUG FIX: the original passed filePath= to saveDataToFile(), which
        # takes no such argument and raised TypeError.
        self.saveDataToFile()

    def convertSecondstoHours(self, seconds):
        # Seconds -> hours, rounded to 2 decimal places.
        return round(seconds / (60 * 60), 2)

    def resetData(self):
        # Wipe all history and restart bookkeeping for today.
        logger.debug("clearing database")
        try:
            for day in list(self.runs.keys()):
                # BUG FIX: the original passed filePath= to removeDay(),
                # which takes no such argument and raised TypeError.
                self.removeDay(day=day)
        except Exception as e:
            logger.error(e)
            return False
        self.runs = {}
        self.currentRun = 0
        self.today = ""
        self.todayString = ""
        self.manageTime()
        return True

    def loadDataFromFile(self):
        # Best-effort load; any failure (missing or corrupt file) falls
        # through to creating a fresh file seeded with today's record.
        try:
            with open(self.filePath, "r") as f:
                temp = json.load(f)
                self.runs = temp["data"]
                self.currentRun = temp["current_run"]
                self.today = dt.strptime(temp["current_day"], "%Y-%m-%d")
                self.todayString = temp["current_day"]
                self.manageTime()
        except Exception:
            logger.debug("Could not find file at {}".format(self.filePath))
            logger.debug("creating file")
            self.addDay()
            try:
                with open(self.filePath, "w") as f:
                    d = {
                        "current_run": self.currentRun,
                        "current_day": self.todayString,
                        "data": self.runs
                    }
                    json.dump(d, f, indent=4)
            except Exception as e:
                logger.error(e)

    def saveDataToFile(self):
        # Persist the in-memory state; errors are logged, not raised.
        try:
            logger.debug("Saving Runs")
            with open(self.filePath, "w") as f:
                d = {
                    "current_run": self.currentRun,
                    "current_day": self.todayString,
                    "data": self.runs
                }
                json.dump(d, f, indent=4)
        except Exception as e:
            logger.error(e)
|
||||
|
||||
# Module-level runtime trackers, one per pump (P001/P002).  Loading pulls
# any persisted history from disk (or seeds a new file with today's record);
# the immediate save ensures the file exists in a parseable state from the
# first boot onward.
rts1 = RuntimeStats("/var/user/files/runtimestats_p001.json")
rts1.loadDataFromFile()
rts1.saveDataToFile()

rts2 = RuntimeStats("/var/user/files/runtimestats_p002.json")
rts2.loadDataFromFile()
rts2.saveDataToFile()
|
||||
|
||||
def reboot():
    # NOTE(review): despite the name this does not reboot the OS -- it sends
    # SIGHUP to supervisord, which restarts the supervised application stack
    # (used after rewriting the device_supervisor config).
    # basic = Basic()
    logger.info("!" * 10 + "REBOOTING DEVICE" + "!"*10)
    r = os.popen("kill -s SIGHUP `cat /var/run/python/supervisord.pid`").read()
    logger.info(f"REBOOT : {r}")
|
||||
|
||||
|
||||
def checkFileExist(filename, path="/var/user/files"):
    """Ensure ``path`` exists and ``path/filename`` exists, creating the
    file with an empty JSON object ({}) when missing.

    :param filename: bare file name to ensure under ``path``.
    :param path: parent directory.  New optional parameter defaulting to the
        original hard-coded location, so existing callers are unaffected.
    """
    filepath = os.path.join(path, filename)
    if not os.path.exists(path):
        logger.info("no folder making files folder in var/user")
        os.makedirs(path)
    # The original duplicated the file-creation logic in both branches; a
    # missing directory implies a missing file, so one check suffices.
    # Append mode ("a") is kept so an existing file is never truncated.
    if not os.path.exists(filepath):
        logger.info("no creds file making creds file")
        with open(filepath, "a") as f:
            json.dump({}, f)
|
||||
|
||||
|
||||
def convertDStoJSON(ds):
    """Flatten a device-supervisor list of {"key": ..., "value": ...}
    records into a plain dict (later duplicate keys win)."""
    return {item["key"]: item["value"] for item in ds}
|
||||
|
||||
|
||||
def convertJSONtoDS(j):
    """Inverse of convertDStoJSON: expand a dict into the device-supervisor
    list-of-records form, preserving insertion order."""
    return [{"key": k, "value": v} for k, v in j.items()]
|
||||
|
||||
|
||||
def checkCredentialConfig():
    # Two-way sync between the device-supervisor config's cloud credentials
    # and a backup file (creds.json):
    #   - config unset/"unknown"  -> restore from backup, rewrite the
    #     config and restart the app stack via reboot().
    #   - config populated        -> copy any changed values into the backup.
    # NOTE(review): indentation reconstructed from a mangled paste -- the
    # nesting below follows the apparent control flow; confirm against the
    # original file.
    logger.info("CHECKING CONFIG")
    cfgpath = "/var/user/cfg/device_supervisor/device_supervisor.cfg"
    credspath = "/var/user/files/creds.json"
    cfg = dict()
    with open(cfgpath, "r") as f:
        cfg = json.load(f)
    clouds = cfg.get("clouds")
    logger.info(clouds)
    # if not configured then try to configure from stored values
    if clouds[0]["args"]["clientId"] == "unknown" or clouds[0]["args"]["username"] == "unknown" or not clouds[0]["args"]["passwd"] or clouds[0]["args"]["passwd"] == "unknown":
        checkFileExist("creds.json")
        with open(credspath, "r") as c:
            creds = json.load(c)
            if creds:
                logger.info("updating config with stored data")
                # Restore all three credential fields from the backup file.
                clouds[0]["args"]["clientId"] = creds["clientId"]
                clouds[0]["args"]["username"] = creds["userName"]
                clouds[0]["args"]["passwd"] = creds["password"]
                cfg["clouds"] = clouds
                # Also reconcile the "labels" parameters before persisting.
                cfg = checkParameterConfig(cfg)
                with open(cfgpath, "w", encoding='utf-8') as n:
                    json.dump(cfg, n, indent=1, ensure_ascii=False)
                # Restart so device supervisor picks up the new config.
                reboot()
    else:
        # assuming clouds is filled out, if data is different then assume someone typed in something new and store it, if creds is empty fill with clouds' data
        checkFileExist("creds.json")
        with open(credspath, "r") as c:
            logger.info("updating stored file with new data")
            # Reconcile parameters and persist the (possibly updated) config.
            cfg = checkParameterConfig(cfg)
            with open(cfgpath, "w", encoding='utf-8') as n:
                json.dump(cfg, n, indent=1, ensure_ascii=False)
            creds = json.load(c)
            if creds:
                # Field-by-field: config wins over the stored backup.
                if creds["clientId"] != clouds[0]["args"]["clientId"]:
                    creds["clientId"] = clouds[0]["args"]["clientId"]
                if creds["userName"] != clouds[0]["args"]["username"]:
                    creds["userName"] = clouds[0]["args"]["username"]
                if creds["password"] != clouds[0]["args"]["passwd"]:
                    creds["password"] = clouds[0]["args"]["passwd"]
            else:
                # Backup was empty ({}) -- seed it from the live config.
                creds["clientId"] = clouds[0]["args"]["clientId"]
                creds["userName"] = clouds[0]["args"]["username"]
                creds["password"] = clouds[0]["args"]["passwd"]
            with open(credspath, "w") as cw:
                json.dump(creds, cw)
|
||||
|
||||
|
||||
def checkParameterConfig(cfg):
    # Reconcile the config's "labels" parameters with the params.json backup
    # and push the merged set into the global parameter store.  Returns the
    # (possibly updated) cfg dict for the caller to persist.
    # NOTE(review): indentation reconstructed from a mangled paste -- the
    # nesting below follows the apparent control flow; confirm against the
    # original file.
    logger.info("Checking Parameters!!!!")
    paramspath = "/var/user/files/params.json"
    cfgparams = convertDStoJSON(cfg.get("labels"))
    # check stored values
    checkFileExist("params.json")
    with open(paramspath, "r") as f:
        logger.info("Opened param storage file")
        params = json.load(f)
        if params:
            if cfgparams != params:
                # go through each param
                # if not "unknown" and cfg and params aren't the same take from cfg likely updated manually
                # if key in cfg but not in params copy to params
                logger.info("equalizing params between cfg and stored")
                for key in cfgparams.keys():
                    try:
                        if cfgparams[key] != params[key] and cfgparams[key] != "unknown":
                            params[key] = cfgparams[key]
                    except:
                        # Key absent from the stored file -- copy it over.
                        params[key] = cfgparams[key]
                cfg["labels"] = convertJSONtoDS(params)
                # Push the merged set into the runtime global store.
                _set_global_args(convertJSONtoDS(params))
                with open(paramspath, "w") as p:
                    json.dump(params, p)
        else:
            with open(paramspath, "w") as p:
                logger.info("initializing param file with params in memory")
                json.dump(convertDStoJSON(get_params()), p)
                cfg["labels"] = get_params()

    return cfg
|
||||
|
||||
|
||||
def getGPS():
    """
    Read the current position from the GPS module and return it in decimal
    degrees plus the ground speed in miles per hour.

    :return: tuple of strings (latitude, longitude, speedMPH) formatted as
             ("%.8f", "%.8f", "%.2f").
    """
    gps = GPS()

    status = gps.get_position_status()
    logger.debug("position_status: ")
    logger.debug(status)

    # Assumes fields look like "<deg>X <min>X <hemisphere>" where each of
    # the first two tokens carries a trailing unit character that must be
    # stripped — TODO confirm exact GPS string format against the module.
    lat_parts = status["latitude"].split(" ")
    lon_parts = status["longitude"].split(" ")

    # degrees + minutes/60 -> decimal degrees
    lat_decimal = int(lat_parts[0][:-1]) + float(lat_parts[1][:-1]) / 60
    lon_decimal = int(lon_parts[0][:-1]) + float(lon_parts[1][:-1]) / 60

    # Southern and western hemispheres are negative.
    if lat_parts[2] == "S":
        lat_decimal *= -1
    if lon_parts[2] == "W":
        lon_decimal *= -1

    # lat_decimal = round(lat_decimal, 7)
    # lon_decimal = round(lon_decimal, 7)
    logger.info("HERE IS THE GPS COORDS")
    logger.info(f"LATITUDE: {lat_decimal}, LONGITUDE: {lon_decimal}")

    knots = status["speed"].split(" ")
    mph = float(knots[0]) * 1.151  # knots -> mph

    return (f"{lat_decimal:.8f}", f"{lon_decimal:.8f}", f"{mph:.2f}")
|
||||
|
||||
def initialize_totalizers():
    """Return a fresh totalizer record with every counter zeroed."""
    fields = (
        "day", "week", "month", "year", "lifetime",
        "dayHolding", "weekHolding", "monthHolding", "yearHolding",
        "rolloverCounter",
    )
    return {field: 0 for field in fields}
|
||||
|
||||
def getTotalizers(file_path="/var/user/files/totalizers.json"):
    """
    Load totalizer data from a JSON file, falling back to a zeroed record.

    A missing file, or contents that are not a non-empty dict, yield a fresh
    record. Invalid JSON is backed up with a timestamp suffix first so the
    corrupt data is preserved for inspection.

    :param file_path: path to the JSON file storing totalizer data.
    :return: dict of totalizer values.
    """
    try:
        with open(file_path, "r") as handle:
            data = json.load(handle)
        if isinstance(data, dict) and data:
            return data
        logger.info("Invalid data format in the file. Initializing totalizers.")
        return initialize_totalizers()
    except FileNotFoundError:
        logger.info("File not found. Initializing totalizers.")
        return initialize_totalizers()
    except json.JSONDecodeError:
        # Preserve the corrupt file before replacing its contents:
        # insert a timestamp between the file name and its extension.
        stamp = dt.now().strftime("%Y%m%d_%H%M%S")
        base, ext = os.path.splitext(file_path)
        backup_file_path = f"{base}_{stamp}{ext}"
        shutil.copyfile(file_path, backup_file_path)
        logger.error(f"Error decoding JSON. A backup of the file is created at {backup_file_path}. Initializing totalizers.")
        return initialize_totalizers()
|
||||
|
||||
def saveTotalizers(totalizers, file_path="/var/user/files/totalizers.json"):
    """
    Save totalizer data to a JSON file.

    :param totalizers: dict of totalizer values to persist.
    :param file_path: path of the JSON file to write.
    :raises OSError: if the file cannot be opened or written.
    :raises TypeError, ValueError: if the data is not JSON-serializable.
    """
    try:
        with open(file_path, "w") as t:
            json.dump(totalizers, t)
    # BUG FIX: the original caught `json.JSONEncodeError`, which does not
    # exist — evaluating that except clause raised AttributeError while a
    # real failure was being handled. json.dump raises TypeError/ValueError
    # for unserializable data; open/write failures raise OSError (IOError
    # is its alias).
    except (IOError, OSError, TypeError, ValueError) as e:
        logger.error(f"Error saving totalizers to {file_path}: {e}")
        raise  # Optionally re-raise the exception if it should be handled by the caller
|
||||
|
||||
def chunk_payload(payload, chunk_size=20):
    """
    Split a telemetry/attributes payload into chunks of at most chunk_size
    entries.

    Handles two shapes:
      * {"ts": ..., "values": {...}} -> yields payloads carrying the same
        "ts" and a slice of "values".
      * flat {"key": value, ...}     -> yields dicts with a slice of keys.

    :param payload: dict to split.
    :param chunk_size: maximum number of entries per chunk.
    """
    if "values" in payload:
        # Timestamped format: keep "ts", slice the values dict.
        items = list(payload["values"].items())
        for start in range(0, len(items), chunk_size):
            yield {
                "ts": payload["ts"],
                "values": dict(items[start:start + chunk_size]),
            }
    else:
        # Flat key/value format: slice across the keys.
        keys = list(payload)
        for start in range(0, len(keys), chunk_size):
            yield {k: payload[k] for k in keys[start:start + chunk_size]}
|
||||
|
||||
|
||||
def chunk_payload_devices(payload, chunk_size=20, is_attributes_payload=False):
    """
    Split a gateway payload into chunks of at most chunk_size entries.

    :param payload: attributes payload {controller: {...}} or telemetry
        payload {controller: [{"ts": ..., "values": {...}}, ...]}.
    :param chunk_size: maximum number of entries per chunk.
    :param is_attributes_payload: True to chunk across controllers, False
        to chunk the "values" inside every entry of every controller.
    """
    if is_attributes_payload:
        # Attributes payload: chunk across controllers.
        controller_pairs = list(payload.items())
        for start in range(0, len(controller_pairs), chunk_size):
            yield dict(controller_pairs[start:start + chunk_size])
        return

    # Telemetry payload: chunk the values within each controller entry,
    # re-wrapping every slice in the controller's list-of-samples shape.
    for controller, entries in payload.items():
        for entry in entries:
            stamp = entry['ts']
            value_pairs = list(entry['values'].items())
            for start in range(0, len(value_pairs), chunk_size):
                yield {
                    controller: [{
                        "ts": stamp,
                        "values": dict(value_pairs[start:start + chunk_size]),
                    }]
                }
|
||||
|
||||
|
||||
def controlName(name):
    """
    Map an internal controller key to its configured display name.

    :param name: internal controller key from the report.
    :return: the configured name for known controllers ("overflow_pump",
             "leak_detection"), otherwise "Gateway".
    """
    logger.debug(name)
    params = convertDStoJSON(get_params())
    logger.debug(params)
    known_controllers = ("overflow_pump", "leak_detection")
    mapping = {key: f"{params[key]}" for key in known_controllers}
    return mapping.get(name, "Gateway")
|
||||
|
||||
|
||||
def sendData(message):
    """
    Build and publish telemetry/attribute payloads from a PLC report.

    Measures that map to "Gateway" are published on the device's own topics;
    all others are grouped per controller and sent through the gateway
    topics. Setpoints (names containing "_spt") go out as attributes, other
    values as telemetry. Pump run bits feed the run-time trackers (rts1 /
    rts2); flow-meter lifetime totals feed the day/week/month/year
    totalizers. When a totalizer period rolls over, an extra "reset" sample
    (ts = now + 1, so it sorts after the live sample) carrying the closing
    totals and zeroed counters is merged into the controller's telemetry.

    :param message: dict with a "measures" list; each measure carries
        "ctrlName", "name", "value" and "health".
    """
    # logger.info(message)
    rts1.loadDataFromFile()
    rts2.loadDataFromFile()
    grouped_data = {}
    grouped_attributes = {}
    # Timestamp rounded to the whole minute, in milliseconds.
    now = (round(dt.timestamp(dt.now())/60)*60)*1000
    payload = {"ts": now, "values": {}}
    attributes_payload = {}
    logger.debug(message)
    resetPayloadLD = {}
    totalizerHolding = {}
    # NOTE(review): sendResetData is set but never read — reset samples are
    # merged into mergedPayload below instead; kept for compatibility.
    sendResetData = False
    for measure in message["measures"]:
        try:
            logger.debug(measure)
            ctrlName = controlName(measure["ctrlName"])
            logger.debug(ctrlName)
            if ctrlName == "Gateway":
                # Send to gateway with v1/devices/me/telemetry.
                if measure["health"] == 1:
                    if "_spt" in measure["name"]:
                        attributes_payload[measure["name"]] = measure["value"]
                    else:
                        payload["values"][measure["name"]] = measure["value"]
            else:
                name = measure['name']
                value = measure['value']
                health = measure['health']
                # Add controller for telemetry if it doesn't exist.
                if ctrlName not in grouped_data:
                    grouped_data[ctrlName] = {}
                # Add controller for attributes if it doesn't exist.
                if ctrlName not in grouped_attributes:
                    grouped_attributes[ctrlName] = {}
                grouped_attributes[ctrlName]["latestReportTime"] = now
                # Add data to temp payload only if datapoint health is good.
                if health:
                    if "_spt" in name:
                        grouped_attributes[ctrlName][name] = value
                    else:
                        grouped_data[ctrlName][name] = value
                    if name in ["p001_run"]:
                        # Pump 1 run bit: open/close runs and report hours.
                        rts1.manageTime()
                        if value == 1 and not rts1.runs[rts1.todayString]["run_" + str(rts1.currentRun)]["start"]:
                            rts1.startRun()
                            rts1.saveDataToFile()
                        elif value == 0 and rts1.runs[rts1.todayString]["run_" + str(rts1.currentRun)]["start"] and not rts1.runs[rts1.todayString]["run_" + str(rts1.currentRun)]["end"]:
                            rts1.endRun()
                            rts1.saveDataToFile()
                        grouped_data[ctrlName][name + "_today_running_hours"] = rts1.calculateRunTimeDay()
                        grouped_data[ctrlName][name + "_month_running_hours"] = rts1.calculateRunTimeMultiDay(numDays=dt.today().day)
                    if name in ["p002_run"]:
                        # Pump 2 run bit: open/close runs and report hours.
                        rts2.manageTime()
                        if value == 1 and not rts2.runs[rts2.todayString]["run_" + str(rts2.currentRun)]["start"]:
                            rts2.startRun()
                            rts2.saveDataToFile()
                        elif value == 0 and rts2.runs[rts2.todayString]["run_" + str(rts2.currentRun)]["start"] and not rts2.runs[rts2.todayString]["run_" + str(rts2.currentRun)]["end"]:
                            rts2.endRun()
                            rts2.saveDataToFile()
                        grouped_data[ctrlName][name + "_today_running_hours"] = rts2.calculateRunTimeDay()
                        grouped_data[ctrlName][name + "_month_running_hours"] = rts2.calculateRunTimeMultiDay(numDays=dt.today().day)
                    if name in ["fm_1_total"]:
                        # Stage flow meter 1 lifetime total for totalizing.
                        if not totalizerHolding.get(ctrlName):
                            totalizerHolding[ctrlName] = {"fm_1": {"dayReset": False, "weekReset": False, "monthReset": False, "yearReset": False}}
                        if not totalizerHolding[ctrlName].get("fm_1"):
                            totalizerHolding[ctrlName]["fm_1"] = {"dayReset": False, "weekReset": False, "monthReset": False, "yearReset": False}
                        totalizerHolding[ctrlName]["fm_1"]["totalizer_1"] = value
                    if name in ["fm_2_total"]:
                        # Stage flow meter 2 lifetime total for totalizing.
                        if not totalizerHolding.get(ctrlName):
                            totalizerHolding[ctrlName] = {"fm_2": {"dayReset": False, "weekReset": False, "monthReset": False, "yearReset": False}}
                        if not totalizerHolding[ctrlName].get("fm_2"):
                            totalizerHolding[ctrlName]["fm_2"] = {"dayReset": False, "weekReset": False, "monthReset": False, "yearReset": False}
                        totalizerHolding[ctrlName]["fm_2"]["totalizer_1"] = value
        except Exception as e:
            logger.error(e)

    # Fold each staged lifetime total into its period totalizers; each flow
    # meter keeps its own storage file.
    for controller, meters in totalizerHolding.items():
        # Reset samples use ts = now + 1 so they land after the live sample.
        resetPayloadLD[controller] = [{"ts": now + 1, "values": {}}]
        for meter, values in meters.items():
            if meter == "fm_1":
                file_path = "/var/user/files/totalizers_ld_fm_1.json"
            elif meter == "fm_2":
                file_path = "/var/user/files/totalizers_ld_fm_2.json"
            else:
                file_path = "/var/user/files/totalizers.json"

            grouped_data[controller][f"{meter}_year_total"], values["yearReset"] = totalizeYear(values["totalizer_1"], file_path=file_path)
            grouped_data[controller][f"{meter}_month_total"], values["monthReset"] = totalizeMonth(values["totalizer_1"], file_path=file_path)
            grouped_data[controller][f"{meter}_week_total"], values["weekReset"] = totalizeWeek(values["totalizer_1"], file_path=file_path)
            grouped_data[controller][f"{meter}_today_total"], values["dayReset"] = totalizeDay(values["totalizer_1"], file_path=file_path)

            # On rollover, report the closing total for the finished period
            # and zero the live counter in the reset sample.
            if values["dayReset"]:
                resetPayloadLD[controller][0]["values"][f"{meter}_yesterday_total"] = grouped_data[controller][f"{meter}_today_total"]
                resetPayloadLD[controller][0]["values"][f"{meter}_today_total"] = 0
                sendResetData = True
            if values["weekReset"]:
                resetPayloadLD[controller][0]["values"][f"{meter}_last_week_total"] = grouped_data[controller][f"{meter}_week_total"]
                resetPayloadLD[controller][0]["values"][f"{meter}_week_total"] = 0
                sendResetData = True
            if values["monthReset"]:
                resetPayloadLD[controller][0]["values"][f"{meter}_last_month_total"] = grouped_data[controller][f"{meter}_month_total"]
                resetPayloadLD[controller][0]["values"][f"{meter}_month_total"] = 0
                sendResetData = True
            if values["yearReset"]:
                resetPayloadLD[controller][0]["values"][f"{meter}_last_year_total"] = grouped_data[controller][f"{meter}_year_total"]
                resetPayloadLD[controller][0]["values"][f"{meter}_year_total"] = 0
                sendResetData = True

    # Transform the grouped data to the desired per-device structure.
    payload_devices = {}
    # logger.info(grouped_data)
    for key, value in grouped_data.items():
        if value:
            payload_devices[key] = [{"ts": now, "values": value}]

    # Merge live telemetry with any non-empty reset samples.
    mergedPayload = {
        k: payload_devices.get(k, []) + [item for item in resetPayloadLD.get(k, []) if item.get('values')]
        for k in set(payload_devices) | set(resetPayloadLD)
    }
    attributes_payload_devices = {}
    for key, value in grouped_attributes.items():
        if value:
            attributes_payload_devices[key] = value

    # Send data belonging to Gateway.
    for chunk in chunk_payload(payload=payload):
        publish(__topic__, json.dumps(chunk), __qos__)
        time.sleep(2)

    attributes_payload["latestReportTime"] = (
        round(dt.timestamp(dt.now())/60)*60)*1000
    for chunk in chunk_payload(payload=attributes_payload):
        publish("v1/devices/me/attributes", json.dumps(chunk), __qos__)
        time.sleep(2)

    # Send gateway devices data.
    for chunk in chunk_payload_devices(payload=mergedPayload):
        publish("v1/gateway/telemetry", json.dumps(chunk), __qos__)
        time.sleep(2)

    # BUG FIX: this loop previously published json.dumps(attributes_payload_devices)
    # — the ENTIRE payload — on every iteration instead of the current chunk,
    # defeating the chunking and re-sending duplicate data once per chunk.
    for chunk in chunk_payload_devices(payload=attributes_payload_devices, is_attributes_payload=True):
        publish("v1/gateway/attributes", json.dumps(chunk), __qos__)
        time.sleep(2)
|
||||
|
||||
|
||||
def totalizeDay(lifetime, max_retries=3, retry_delay=2, file_path="/var/user/files/totalizers.json"):
    """
    Update and persist the daily totalizer from a lifetime total.

    :param lifetime: the current lifetime total.
    :param max_retries: maximum number of save attempts.
    :param retry_delay: delay in seconds between retries.
    :param file_path: totalizer storage file.
    :return: (value, reset) where value is the amount accumulated since the
             last day rollover and reset indicates a rollover was recorded,
             or (None, False) if the rollover could not be saved.
    """
    totalizers = getTotalizers(file_path=file_path)
    # Timestamp rounded to the nearest 10 minutes.
    now = dt.fromtimestamp(round(dt.timestamp(dt.now()) / 600) * 600)
    value = lifetime - totalizers["dayHolding"]

    # Same day-of-month: no rollover, just report the running total.
    if int(now.strftime("%d")) == int(totalizers["day"]):
        return (value, False)

    # Day changed: re-anchor the holding value at the current lifetime
    # total and persist, retrying on failure.
    totalizers["dayHolding"] = lifetime
    totalizers["day"] = int(now.strftime("%d"))
    for attempt in range(max_retries):
        try:
            saveTotalizers(totalizers, file_path=file_path)
            # Report the closing total for the finished day.
            return (value, True)
        except Exception as e:
            logger.error(f"Attempt {attempt + 1} failed to save totalizers: {e}")
            if attempt < max_retries - 1:
                time.sleep(retry_delay)
            else:
                logger.error("All attempts to save totalizers failed.")
                return (None, False)

    return (value, False)  # unreachable: the loop always returns
|
||||
|
||||
def totalizeWeek(lifetime, max_retries=3, retry_delay=2, file_path="/var/user/files/totalizers.json"):
    """
    Update and persist the weekly totalizer from a lifetime total.

    :param lifetime: the current lifetime total.
    :param max_retries: maximum number of save attempts.
    :param retry_delay: delay in seconds between retries.
    :param file_path: totalizer storage file.
    :return: (value, reset) where value is the amount accumulated since the
             last week rollover and reset indicates a rollover was recorded,
             or (None, False) if the rollover could not be saved.
    """
    totalizers = getTotalizers(file_path=file_path)
    # Timestamp rounded to the nearest 10 minutes.
    now = dt.fromtimestamp(round(dt.timestamp(dt.now()) / 600) * 600)
    value = lifetime - totalizers["weekHolding"]

    # Roll over when the week number changed AND it is Sunday, or on the
    # very first run (stored week still the initial 0).
    week_changed = not now.strftime("%U") == totalizers["week"]
    rollover = (week_changed and now.strftime("%a") == "Sun") or totalizers["week"] == 0
    if not rollover:
        return (value, False)

    # Re-anchor the holding value and persist, retrying on failure.
    totalizers["weekHolding"] = lifetime
    totalizers["week"] = now.strftime("%U")
    for attempt in range(max_retries):
        try:
            saveTotalizers(totalizers, file_path=file_path)
            # Report the closing total for the finished week.
            return (value, True)
        except Exception as e:
            logger.error(f"Attempt {attempt + 1} failed to save totalizers: {e}")
            if attempt < max_retries - 1:
                time.sleep(retry_delay)
            else:
                logger.error("All attempts to save totalizers failed.")
                return (None, False)
    return (value, False)  # unreachable: the loop always returns
|
||||
|
||||
def totalizeMonth(lifetime, max_retries=3, retry_delay=2, file_path="/var/user/files/totalizers.json"):
    """
    Update and persist the monthly totalizer from a lifetime total.

    :param lifetime: the current lifetime total.
    :param max_retries: maximum number of save attempts.
    :param retry_delay: delay in seconds between retries.
    :param file_path: totalizer storage file.
    :return: (value, reset) where value is the amount accumulated since the
             last month rollover and reset indicates a rollover was
             recorded, or (None, False) if the rollover could not be saved.
    """
    totalizers = getTotalizers(file_path=file_path)
    # Timestamp rounded to the nearest 10 minutes.
    now = dt.fromtimestamp(round(dt.timestamp(dt.now()) / 600) * 600)
    value = lifetime - totalizers["monthHolding"]

    # Same month: no rollover, just report the running total. int() keeps
    # the comparison valid whether the stored month is 0 or "%m" text.
    if int(now.strftime("%m")) == int(totalizers["month"]):
        return (value, False)

    # Month changed: re-anchor the holding value and persist, retrying on
    # failure.
    totalizers["monthHolding"] = lifetime
    totalizers["month"] = now.strftime("%m")
    for attempt in range(max_retries):
        try:
            saveTotalizers(totalizers, file_path=file_path)
            # Report the closing total for the finished month.
            return (value, True)
        except Exception as e:
            logger.error(f"Attempt {attempt + 1} failed to save totalizers: {e}")
            if attempt < max_retries - 1:
                time.sleep(retry_delay)
            else:
                logger.error("All attempts to save totalizers failed.")
                return (None, False)

    return (value, False)  # unreachable: the loop always returns
|
||||
|
||||
def totalizeYear(lifetime, max_retries=3, retry_delay=2, file_path="/var/user/files/totalizers.json"):
|
||||
"""
|
||||
Update and save yearly totalizers based on the lifetime value.
|
||||
|
||||
:param lifetime: The current lifetime total.
|
||||
:param max_retries: Maximum number of save attempts.
|
||||
:param retry_delay: Delay in seconds between retries.
|
||||
:return: A tuple containing the calculated value and a boolean indicating if a reset occurred, or (None, False) if save fails.
|
||||
"""
|
||||
totalizers = getTotalizers(file_path=file_path)
|
||||
now = dt.fromtimestamp(round(dt.timestamp(dt.now())/600)*600)
|
||||
reset = False
|
||||
value = lifetime - totalizers["yearHolding"]
|
||||
|
||||
if not int(now.strftime("%Y")) == int(totalizers["year"]):
|
||||
totalizers["yearHolding"] = lifetime
|
||||
totalizers["year"] = now.strftime("%Y")
|
||||
|
||||
for attempt in range(max_retries):
|
||||
try:
|
||||
saveTotalizers(totalizers, file_path=file_path)
|
||||
reset = True
|
||||
return (value, reset)
|
||||
except Exception as e:
|
||||
logger.error(f"Attempt {attempt + 1} failed to save totalizers: {e}")
|
||||
if attempt < max_retries - 1:
|
||||
time.sleep(retry_delay)
|
||||
else:
|
||||
logger.error("All attempts to save totalizers failed.")
|
||||
return (None, False)
|
||||
@@ -14,7 +14,8 @@ def convertDStoJSON(ds):
|
||||
def formatPLCPayload(device, key, value):
|
||||
params = convertDStoJSON(get_params())
|
||||
nameMap = {
|
||||
f"{params['overflow_pump']}": "overflow_pump"
|
||||
f"{params['overflow_pump']}": "overflow_pump",
|
||||
f"{params['leak_detection']}": "leak_detection"
|
||||
}
|
||||
measure = key
|
||||
device = nameMap.get(device, "")
|
||||
@@ -64,7 +65,8 @@ def controlName(name):
|
||||
params = convertDStoJSON(get_params())
|
||||
logger.debug(params)
|
||||
nameMap = {
|
||||
"overflow_pump": f"{params['overflow_pump']}"
|
||||
"overflow_pump": f"{params['overflow_pump']}",
|
||||
"leak_detection": f"{params['leak_detection']}"
|
||||
}
|
||||
return nameMap.get(name, "Gateway")
|
||||
|
||||
@@ -163,9 +165,10 @@ def sync(device_filter=[]):
|
||||
def writeplctag(value):
|
||||
#value in the form {"measurement": <measurement_name>, "value": <value to write>}
|
||||
try:
|
||||
logger.debug(value)
|
||||
logger.info(value)
|
||||
#payload format: [{"name": "advvfdipp", "measures": [{"name": "manualfrequencysetpoint", "value": 49}]}]
|
||||
message = [{"name": value["device"], "measures": [{"name": value["measurement"], "value": value["value"]}]}]
|
||||
logger.info(message)
|
||||
resp = write(message)
|
||||
logger.debug("RETURN FROM WRITE: {}".format(resp))
|
||||
return True
|
||||
@@ -177,11 +180,17 @@ def writeplctag(value):
|
||||
def receiveAttribute(topic, payload):
|
||||
try:
|
||||
logger.debug(topic)
|
||||
logger.debug(json.loads(payload))
|
||||
logger.info(json.loads(payload))
|
||||
p = json.loads(payload)
|
||||
device = p["device"]
|
||||
for key, value in p["data"].items():
|
||||
try:
|
||||
if key == 'pond_1_lo_spt':
|
||||
measure = formatPLCPayload(device, 'pond_1_lo_clr_spt', value + 0.5)
|
||||
result = writeplctag(measure)
|
||||
elif key == 'pond_2_lo_spt':
|
||||
measure = formatPLCPayload(device, 'pond_2_lo_clr_spt', value + 0.5)
|
||||
result = writeplctag(measure)
|
||||
measure = formatPLCPayload(device, key, value)
|
||||
result = writeplctag(measure)
|
||||
logger.debug(result)
|
||||
|
||||
@@ -1,12 +1,15 @@
|
||||
AlarmName,ControllerName,MeasuringPointName,AlarmLevel,Condition1,Operand1,CombineMethod,Condition2,Operand2,AlarmContent,AlarmTag
|
||||
tp_1_hi_a_winding_alm,rr_facility,tp_1_hi_a_winding_alm,5,eq,1,none,eq,,tp 1 hi a winding failure,default
|
||||
tp_1_hi_b_winding_alm,rr_facility,tp_1_hi_b_winding_alm,5,eq,1,none,eq,,tp 1 hi b winding failure,default
|
||||
tp_1_hi_c_winding_alm,rr_facility,tp_1_hi_c_winding_alm,5,eq,1,none,eq,,tp 1 hi c winding failure,default
|
||||
tp_1_hi_discharge_alm,rr_facility,tp_1_hi_discharge_alm,5,eq,1,none,eq,,tp 1 hi discharge failure,default
|
||||
tp_1_hi_inboard_temp_alm,rr_facility,tp_1_hi_inboard_temp_alm,5,eq,1,none,eq,,Failure,default
|
||||
tp_1_hi_outboard_temp_alm,rr_facility,tp_1_hi_outboard_temp_alm,5,eq,1,none,eq,,Failure,default
|
||||
tp_1_hi_vibration_alm,rr_facility,tp_1_hi_vibration_alm,5,eq,1,none,eq,,Failure,default
|
||||
tp_1_lo_discharge_alm,rr_facility,tp_1_lo_discharge_alm,5,eq,1,none,eq,,Failure,default
|
||||
tp_1_lo_oil_alm,rr_facility,tp_1_lo_oil_alm,5,eq,1,none,eq,,Failure,default
|
||||
tp_1_lo_suction_alm,rr_facility,tp_1_lo_suction_alm,5,eq,1,none,eq,,Failure,default
|
||||
tp_1_oil_cooler_failed_to_start_alm,rr_facility,tp_1_oil_cooler_failed_to_start_alm,5,eq,1,none,eq,,Failure,default
|
||||
tp_1_hi_a_winding_alm,rr_facility,tp_1_hi_a_winding_alm,5,eq,1.0,none,eq,,tp 1 hi a winding failure,default
|
||||
tp_1_hi_b_winding_alm,rr_facility,tp_1_hi_b_winding_alm,5,eq,1.0,none,eq,,tp 1 hi b winding failure,default
|
||||
tp_1_hi_c_winding_alm,rr_facility,tp_1_hi_c_winding_alm,5,eq,1.0,none,eq,,tp 1 hi c winding failure,default
|
||||
tp_1_hi_discharge_alm,rr_facility,tp_1_hi_discharge_alm,5,eq,1.0,none,eq,,tp 1 hi discharge failure,default
|
||||
tp_1_hi_inboard_temp_alm,rr_facility,tp_1_hi_inboard_temp_alm,5,eq,1.0,none,eq,,Failure,default
|
||||
tp_1_hi_outboard_temp_alm,rr_facility,tp_1_hi_outboard_temp_alm,5,eq,1.0,none,eq,,Failure,default
|
||||
tp_1_hi_vibration_alm,rr_facility,tp_1_hi_vibration_alm,5,eq,1.0,none,eq,,Failure,default
|
||||
tp_1_lo_discharge_alm,rr_facility,tp_1_lo_discharge_alm,5,eq,1.0,none,eq,,Failure,default
|
||||
tp_1_lo_oil_alm,rr_facility,tp_1_lo_oil_alm,5,eq,1.0,none,eq,,Failure,default
|
||||
tp_1_lo_suction_alm,rr_facility,tp_1_lo_suction_alm,5,eq,1.0,none,eq,,Failure,default
|
||||
tp_1_oil_cooler_failed_to_start_alm,rr_facility,tp_1_oil_cooler_failed_to_start_alm,5,eq,1.0,none,eq,,Failure,default
|
||||
tp_1_pid_auto_cmd_auto,rr_facility,tp_1_pid_auto_cmd,5,eq,1,none,eq,,Moved to Auto,default
|
||||
tp_1_pid_auto_cmd_manual,rr_facility,tp_1_pid_auto_cmd,1,eq,0,none,eq,,Moved to Manual,default
|
||||
tp_1_vfd_faulted_alm,rr_facility,tp_1_vfd_faulted_alm,5,eq,1,none,eq,,Failure,default
|
||||
|
Reference in New Issue
Block a user