Added RR facility
This commit is contained in:
509
Pub_Sub/piflow/thingsboard/pub/sendData.py
Normal file
509
Pub_Sub/piflow/thingsboard/pub/sendData.py
Normal file
@@ -0,0 +1,509 @@
|
||||
# Enter your python code.
|
||||
import json, os, time, shutil
|
||||
from datetime import datetime as dt
|
||||
from common.Logger import logger
|
||||
from quickfaas.remotebus import publish
|
||||
from quickfaas.global_dict import get as get_params
|
||||
from quickfaas.global_dict import _set_global_args
|
||||
from mobiuspi_lib.gps import GPS
|
||||
|
||||
payload = {}
|
||||
|
||||
def initialize_totalizers():
|
||||
return {
|
||||
"totalizer_1_day": 0,
|
||||
"totalizer_1_week": 0,
|
||||
"totalizer_1_month": 0,
|
||||
"totalizer_1_year": 0,
|
||||
"totalizer_1_lifetime": 0,
|
||||
"totalizer_1_dayHolding": 0,
|
||||
"totalizer_1_weekHolding": 0,
|
||||
"totalizer_1_monthHolding": 0,
|
||||
"totalizer_1_yearHolding": 0,
|
||||
"totalizer_1_rolloverCounter": 0,
|
||||
"totalizer_2_day": 0,
|
||||
"totalizer_2_week": 0,
|
||||
"totalizer_2_month": 0,
|
||||
"totalizer_2_year": 0,
|
||||
"totalizer_2_lifetime": 0,
|
||||
"totalizer_2_dayHolding": 0,
|
||||
"totalizer_2_weekHolding": 0,
|
||||
"totalizer_2_monthHolding": 0,
|
||||
"totalizer_2_yearHolding": 0,
|
||||
"totalizer_2_rolloverCounter": 0,
|
||||
"totalizer_3_day": 0,
|
||||
"totalizer_3_week": 0,
|
||||
"totalizer_3_month": 0,
|
||||
"totalizer_3_year": 0,
|
||||
"totalizer_3_lifetime": 0,
|
||||
"totalizer_3_dayHolding": 0,
|
||||
"totalizer_3_weekHolding": 0,
|
||||
"totalizer_3_monthHolding": 0,
|
||||
"totalizer_3_yearHolding": 0,
|
||||
"totalizer_3_rolloverCounter": 0
|
||||
}
|
||||
|
||||
def getTotalizers(file_path="/var/user/files/totalizers.json"):
|
||||
"""
|
||||
Retrieves totalizer data from a JSON file.
|
||||
|
||||
:param file_path: Path to the JSON file storing totalizer data.
|
||||
:return: Dictionary containing totalizer values.
|
||||
"""
|
||||
try:
|
||||
with open(file_path, "r") as t:
|
||||
totalizers = json.load(t)
|
||||
if not totalizers or not isinstance(totalizers, dict):
|
||||
logger.info("Invalid data format in the file. Initializing totalizers.")
|
||||
totalizers = initialize_totalizers()
|
||||
except FileNotFoundError:
|
||||
logger.info("File not found. Initializing totalizers.")
|
||||
totalizers = initialize_totalizers()
|
||||
except json.JSONDecodeError:
|
||||
timestamp = dt.now().strftime("%Y%m%d_%H%M%S")
|
||||
# Split the file path and insert the timestamp before the extension
|
||||
file_name, file_extension = os.path.splitext(file_path)
|
||||
backup_file_path = f"{file_name}_{timestamp}{file_extension}"
|
||||
shutil.copyfile(file_path, backup_file_path)
|
||||
logger.error(f"Error decoding JSON. A backup of the file is created at {backup_file_path}. Initializing totalizers.")
|
||||
totalizers = initialize_totalizers()
|
||||
return totalizers
|
||||
|
||||
|
||||
def chunk_payload(payload, chunk_size=20):
|
||||
if "values" in payload:
|
||||
# Original format: {"ts": ..., "values": {...}}
|
||||
chunked_values = list(payload["values"].items())
|
||||
for i in range(0, len(chunked_values), chunk_size):
|
||||
yield {
|
||||
"ts": payload["ts"],
|
||||
"values": dict(chunked_values[i:i+chunk_size])
|
||||
}
|
||||
else:
|
||||
# New format: {"key1": "value1", "key2": "value2"}
|
||||
chunked_keys = list(payload.keys())
|
||||
for i in range(0, len(chunked_keys), chunk_size):
|
||||
yield {k: payload[k] for k in chunked_keys[i:i+chunk_size]}
|
||||
|
||||
def sendData(message):
|
||||
logger.debug(message)
|
||||
payload = {"ts": (round(dt.timestamp(dt.now())/600)*600)*1000, "values": {}}
|
||||
resetPayload = {"ts": "", "values": {}}
|
||||
dayReset, weekReset, monthReset, yearReset = False, False, False, False
|
||||
for measure in message['measures']:
|
||||
try:
|
||||
if measure["health"] == 1:
|
||||
if measure["name"] in ["totalizer_1", "totalizer_2", "totalizer_3"]:
|
||||
payload["values"]["year_" + measure["name"]], yearReset = totalizeYear(measure["value"], measure["name"])
|
||||
payload["values"]["month_" + measure["name"]], monthReset = totalizeMonth(measure["value"], measure["name"])
|
||||
payload["values"]["week_" + measure["name"]], weekReset = totalizeWeek(measure["value"], measure["name"])
|
||||
payload["values"]["today_" + measure["name"]], dayReset = totalizeDay(measure["value"], measure["name"])
|
||||
elif measure["name"] in ["totalizer_1_units", "totalizer_2_units", "totalizer_3_units", "flow_unit", "run_status", "fault_a", "fault_b"]:
|
||||
payload["values"][measure["name"]] = convert_int(measure["name"], measure["value"])
|
||||
else:
|
||||
payload["values"][measure["name"]] = measure["value"]
|
||||
except Exception as e:
|
||||
logger.error(e)
|
||||
|
||||
for chunk in chunk_payload(payload=payload):
|
||||
publish(__topic__, json.dumps(chunk), __qos__)
|
||||
time.sleep(2)
|
||||
try:
|
||||
publish("v1/devices/me/attributes", json.dumps({"latestReportTime": (round(dt.timestamp(dt.now())/600)*600)*1000}), __qos__)
|
||||
except Exception as e:
|
||||
logger.error(e)
|
||||
|
||||
if dayReset:
|
||||
resetPayload["values"]["yesterday_totalizer_1"] = payload["values"]["today_totalizer_1"]
|
||||
resetPayload["values"]["yesterday_totalizer_2"] = payload["values"]["today_totalizer_2"]
|
||||
resetPayload["values"]["yesterday_totalizer_3"] = payload["values"]["today_totalizer_3"]
|
||||
resetPayload["values"]["today_totalizer_1"] = 0
|
||||
resetPayload["values"]["today_totalizer_2"] = 0
|
||||
resetPayload["values"]["today_totalizer_3"] = 0
|
||||
if weekReset:
|
||||
resetPayload["values"]["last_week_totalizer_1"] = payload["values"]["week_totalizer_1"]
|
||||
resetPayload["values"]["last_week_totalizer_2"] = payload["values"]["week_totalizer_1"]
|
||||
resetPayload["values"]["last_week_totalizer_3"] = payload["values"]["week_totalizer_3"]
|
||||
resetPayload["values"]["week_totalizer_1"] = 0
|
||||
resetPayload["values"]["week_totalizer_2"] = 0
|
||||
resetPayload["values"]["week_totalizer_3"] = 0
|
||||
if monthReset:
|
||||
resetPayload["values"]["last_month_totalizer_1"] = payload["values"]["month_totalizer_1"]
|
||||
resetPayload["values"]["last_month_totalizer_2"] = payload["values"]["month_totalizer_2"]
|
||||
resetPayload["values"]["last_month_totalizer_3"] = payload["values"]["month_totalizer_3"]
|
||||
resetPayload["values"]["month_totalizer_1"] = 0
|
||||
resetPayload["values"]["month_totalizer_2"] = 0
|
||||
resetPayload["values"]["month_totalizer_3"] = 0
|
||||
if yearReset:
|
||||
resetPayload["values"]["last_year_totalizer_1"] = payload["values"]["year_totalizer_1"]
|
||||
resetPayload["values"]["last_year_totalizer_2"] = payload["values"]["year_totalizer_2"]
|
||||
resetPayload["values"]["last_year_totalizer_3"] = payload["values"]["year_totalizer_3"]
|
||||
resetPayload["values"]["year_totalizer_1"] = 0
|
||||
resetPayload["values"]["year_totalizer_2"] = 0
|
||||
resetPayload["values"]["year_totalizer_3"] = 0
|
||||
|
||||
if resetPayload["values"]:
|
||||
resetPayload["ts"] = 1 + (round(dt.timestamp(dt.now())/600)*600)*1000
|
||||
publish(__topic__, json.dumps(resetPayload), __qos__)
|
||||
|
||||
def saveTotalizers(totalizers, file_path="/var/user/files/totalizers.json"):
|
||||
"""
|
||||
Saves totalizer data to a JSON file.
|
||||
|
||||
:param totalizers: Dictionary containing totalizer values to be saved.
|
||||
:param file_path: Path to the JSON file where totalizer data will be saved.
|
||||
"""
|
||||
try:
|
||||
with open(file_path, "w") as t:
|
||||
json.dump(totalizers, t)
|
||||
except (IOError, OSError, json.JSONEncodeError) as e:
|
||||
logger.error(f"Error saving totalizers to {file_path}: {e}")
|
||||
raise # Optionally re-raise the exception if it should be handled by the caller
|
||||
|
||||
def totalizeDay(lifetime, totalizer, max_retries=3, retry_delay=2):
|
||||
"""
|
||||
Update and save daily totalizers based on the lifetime value.
|
||||
|
||||
:param lifetime: The current lifetime total.
|
||||
:param max_retries: Maximum number of save attempts.
|
||||
:param retry_delay: Delay in seconds between retries.
|
||||
:return: A tuple containing the calculated value and a boolean indicating if a reset occurred, or (None, False) if save fails.
|
||||
"""
|
||||
totalizers = getTotalizers()
|
||||
now = dt.fromtimestamp(round(dt.timestamp(dt.now())/600)*600)
|
||||
reset = False
|
||||
value = (lifetime + totalizers[totalizer + "_rolloverCounter"] * 10000000) - totalizers[totalizer + "_dayHolding"]
|
||||
|
||||
if not int(now.strftime("%d")) == int(totalizers[totalizer + "_day"]):
|
||||
totalizers[totalizer + "_dayHolding"] = lifetime + totalizers[totalizer + "_rolloverCounter"] * 10000000
|
||||
totalizers[totalizer + "_day"] = int(now.strftime("%d"))
|
||||
|
||||
for attempt in range(max_retries):
|
||||
try:
|
||||
saveTotalizers(totalizers)
|
||||
reset = True
|
||||
return (value, reset)
|
||||
except Exception as e:
|
||||
logger.error(f"Attempt {attempt + 1} failed to save totalizers: {e}")
|
||||
if attempt < max_retries - 1:
|
||||
time.sleep(retry_delay)
|
||||
else:
|
||||
logger.error("All attempts to save totalizers failed.")
|
||||
return (None, False)
|
||||
return (value, reset)
|
||||
|
||||
def totalizeWeek(lifetime, totalizer, max_retries=3, retry_delay=2):
|
||||
"""
|
||||
Update and save weekly totalizers based on the lifetime value.
|
||||
|
||||
:param lifetime: The current lifetime total.
|
||||
:param max_retries: Maximum number of save attempts.
|
||||
:param retry_delay: Delay in seconds between retries.
|
||||
:return: A tuple containing the calculated value and a boolean indicating if a reset occurred, or (None, False) if save fails.
|
||||
"""
|
||||
totalizers = getTotalizers()
|
||||
now = dt.fromtimestamp(round(dt.timestamp(dt.now())/600)*600)
|
||||
reset = False
|
||||
value = (lifetime + totalizers[totalizer + "_rolloverCounter"] * 10000000) - totalizers[totalizer + "_weekHolding"]
|
||||
if (not now.strftime("%U") == totalizers[totalizer + "_week"] and now.strftime("%a") == "Sun") or totalizers[totalizer + "_week"] == 0:
|
||||
totalizers[totalizer + "_weekHolding"] = lifetime + totalizers[totalizer + "_rolloverCounter"] * 10000000
|
||||
totalizers[totalizer + "_week"] = now.strftime("%U")
|
||||
|
||||
for attempt in range(max_retries):
|
||||
try:
|
||||
saveTotalizers(totalizers)
|
||||
reset = True
|
||||
return (value, reset)
|
||||
except Exception as e:
|
||||
logger.error(f"Attempt {attempt + 1} failed to save totalizers: {e}")
|
||||
if attempt < max_retries - 1:
|
||||
time.sleep(retry_delay)
|
||||
else:
|
||||
logger.error("All attempts to save totalizers failed.")
|
||||
return (None, False)
|
||||
return (value, reset)
|
||||
|
||||
def totalizeMonth(lifetime, totalizer, max_retries=3, retry_delay=2):
|
||||
"""
|
||||
Update and save monthly totalizers based on the lifetime value.
|
||||
|
||||
:param lifetime: The current lifetime total.
|
||||
:param max_retries: Maximum number of save attempts.
|
||||
:param retry_delay: Delay in seconds between retries.
|
||||
:return: A tuple containing the calculated value and a boolean indicating if a reset occurred, or (None, False) if save fails.
|
||||
"""
|
||||
totalizers = getTotalizers()
|
||||
now = dt.fromtimestamp(round(dt.timestamp(dt.now())/600)*600)
|
||||
reset = False
|
||||
value = (lifetime + totalizers[totalizer + "_rolloverCounter"] * 10000000) - totalizers[totalizer + "_monthHolding"]
|
||||
if not int(now.strftime("%m")) == int(totalizers[totalizer + "_month"]):
|
||||
totalizers[totalizer + "_monthHolding"] = lifetime + totalizers[totalizer + "_rolloverCounter"] * 10000000
|
||||
totalizers[totalizer + "_month"] = now.strftime("%m")
|
||||
|
||||
for attempt in range(max_retries):
|
||||
try:
|
||||
saveTotalizers(totalizers)
|
||||
reset = True
|
||||
return (value, reset)
|
||||
except Exception as e:
|
||||
logger.error(f"Attempt {attempt + 1} failed to save totalizers: {e}")
|
||||
if attempt < max_retries - 1:
|
||||
time.sleep(retry_delay)
|
||||
else:
|
||||
logger.error("All attempts to save totalizers failed.")
|
||||
return (None, False)
|
||||
|
||||
return (value,reset)
|
||||
|
||||
def totalizeYear(lifetime, totalizer, max_retries=3, retry_delay=2):
|
||||
"""
|
||||
Update and save yearly totalizers based on the lifetime value.
|
||||
|
||||
:param lifetime: The current lifetime total.
|
||||
:param max_retries: Maximum number of save attempts.
|
||||
:param retry_delay: Delay in seconds between retries.
|
||||
:return: A tuple containing the calculated value and a boolean indicating if a reset occurred, or (None, False) if save fails.
|
||||
"""
|
||||
totalizers = getTotalizers()
|
||||
if not totalizers.get("rolloverCounter"):
|
||||
totalizers[totalizer + "_rolloverCounter"] = 0
|
||||
totalizers[totalizer + "_lastRawLifetime"] = lifetime
|
||||
now = dt.fromtimestamp(round(dt.timestamp(dt.now())/600)*600)
|
||||
reset = False
|
||||
if totalizers[totalizer + "_lastRawLifetime"] > lifetime:
|
||||
totalizers[totalizer + "_rolloverCounter"] += 1
|
||||
totalizers[totalizer + "_lastRawLifetime"] = lifetime
|
||||
|
||||
value = (lifetime + totalizers[totalizer + "_rolloverCounter"] * 10000000) - totalizers[totalizer + "_yearHolding"]
|
||||
if not int(now.strftime("%Y")) == int(totalizers[totalizer + "_year"]):
|
||||
totalizers[totalizer + "_yearHolding"] = lifetime
|
||||
totalizers[totalizer + "_year"] = now.strftime("%Y")
|
||||
totalizers[totalizer + "_rolloverCounter"] = 0
|
||||
|
||||
for attempt in range(max_retries):
|
||||
try:
|
||||
saveTotalizers(totalizers)
|
||||
reset = True
|
||||
return (value, reset)
|
||||
except Exception as e:
|
||||
logger.error(f"Attempt {attempt + 1} failed to save totalizers: {e}")
|
||||
if attempt < max_retries - 1:
|
||||
time.sleep(retry_delay)
|
||||
else:
|
||||
logger.error("All attempts to save totalizers failed.")
|
||||
return (None, False)
|
||||
|
||||
def decode_status(value):
|
||||
drive_status_codes = {
|
||||
15: "Reversed",
|
||||
14: "REM. Freq",
|
||||
13: "REM. R/S",
|
||||
12: "Reverse operation command",
|
||||
11: "Forward operation command",
|
||||
10: "Brake release signal",
|
||||
9: "-(Not used)",
|
||||
8: "Drive stopped.",
|
||||
7: "DC Braking",
|
||||
6: "Speed reached",
|
||||
5: "Decelerating",
|
||||
4: "Accelerating",
|
||||
3: "Fault trip",
|
||||
2: "Operating in reverse direction",
|
||||
1: "Operating in forward direction",
|
||||
0: "Stopped",
|
||||
}
|
||||
# List to collect the active bits' descriptions
|
||||
active_bits = []
|
||||
|
||||
# Iterate through each bit
|
||||
for bit, description in drive_status_codes.items():
|
||||
if value & (1 << bit): # Check if the bit is set
|
||||
active_bits.append(description)
|
||||
|
||||
return ", ".join(active_bits)
|
||||
|
||||
def convert_int(measure, value):
|
||||
|
||||
totalizer_codes = {
|
||||
0: "cm cubed",
|
||||
1: "dm cubed",
|
||||
2: "m cubed",
|
||||
3: "ml",
|
||||
4: "l",
|
||||
5: "hl",
|
||||
6: "Ml Mega",
|
||||
8: "af",
|
||||
9: "ft cubed",
|
||||
10: "fl oz (us)",
|
||||
11: "gal (us)",
|
||||
12: "Mgal (us)",
|
||||
13: "bbl (us;liq.)",
|
||||
14: "bbl (us;beer)",
|
||||
15: "bbl (us;oil)",
|
||||
16: "bbl (us;tank)",
|
||||
17: "gal (imp)",
|
||||
18: "Mgal (imp)",
|
||||
19: "bbl (imp;beer)",
|
||||
20: "bbl (imp;oil)",
|
||||
22: "kgal (us)",
|
||||
23: "Mft cubed",
|
||||
50: "g",
|
||||
51: "kg",
|
||||
52: "t",
|
||||
53: "oz",
|
||||
54: "lb",
|
||||
55: "STon",
|
||||
100: "Nl",
|
||||
101: "Nm cubed",
|
||||
102: "Sm cubed",
|
||||
103: "Sft cubed",
|
||||
104: "Sl",
|
||||
105: "Sgal (us)",
|
||||
106: "Sbbl (us;liq.)",
|
||||
107: "Sgal (imp)",
|
||||
108: "Sbbl (us;oil)",
|
||||
109: "MMSft cubed",
|
||||
110: "Nhl",
|
||||
251: "None"
|
||||
}
|
||||
|
||||
flow_codes = {
|
||||
0: "cm cubed/s",
|
||||
1: "cm cubed/min",
|
||||
2: "cm cubed/h",
|
||||
3: "cm cubed/d",
|
||||
4: "dm cubed/s",
|
||||
5: "dm cubed/min",
|
||||
6: "dm cubed/h",
|
||||
7: "dm cubed/d",
|
||||
8: "m cubed/s",
|
||||
9: "m cubed/min",
|
||||
10: "m cubed/h",
|
||||
11: "m cubed/d",
|
||||
12: "ml/s",
|
||||
13: "ml/min",
|
||||
14: "ml/h",
|
||||
15: "ml/d",
|
||||
16: "l/s",
|
||||
17: "l/min",
|
||||
18: "l/h (+)",
|
||||
19: "l/d",
|
||||
20: "hl/s",
|
||||
21: "hl/min",
|
||||
22: "hl/h",
|
||||
23: "hl/d",
|
||||
24: "Ml/s",
|
||||
25: "Ml/min",
|
||||
26: "Ml/h",
|
||||
27: "Ml/d",
|
||||
32: "af/s",
|
||||
33: "af/min",
|
||||
34: "af/h",
|
||||
35: "af/d",
|
||||
36: "ft cubed/s",
|
||||
37: "ft cubed/min",
|
||||
38: "ft cubed/h",
|
||||
39: "ft cubed/d",
|
||||
40: "fl oz/s (us)",
|
||||
41: "fl oz/min (us)",
|
||||
42: "fl oz/h (us)",
|
||||
43: "fl oz/d (us)",
|
||||
44: "gal/s (us)",
|
||||
45: "gal/min (us)",
|
||||
46: "gal/h (us)",
|
||||
47: "gal/d (us)",
|
||||
48: "Mgal/s (us)",
|
||||
49: "Mgal/min (us)",
|
||||
50: "Mgal/h (us)",
|
||||
51: "Mgal/d (us)",
|
||||
52: "bbl/s (us;liq.)",
|
||||
53: "bbl/min (us;liq.)",
|
||||
54: "bbl/h (us;liq.)",
|
||||
55: "bbl/d (us;liq.)",
|
||||
56: "bbl/s (us;beer)",
|
||||
57: "bbl/min (us;beer)",
|
||||
58: "bbl/h (us;beer)",
|
||||
59: "bbl/d (us;beer)",
|
||||
60: "bbl/s (us;oil)",
|
||||
61: "bbl/min (us;oil)",
|
||||
62: "bbl/h (us;oil)",
|
||||
63: "bbl/d (us;oil)",
|
||||
64: "bbl/s (us;tank)",
|
||||
65: "bbl/min (us;tank)",
|
||||
66: "bbl/h (us;tank)",
|
||||
67: "bbl/d (us;tank)",
|
||||
68: "gal/s (imp)",
|
||||
69: "gal/min (imp)",
|
||||
70: "gal/h (imp)",
|
||||
71: "gal/d (imp)",
|
||||
72: "Mgal/s (imp)",
|
||||
73: "Mgal/min (imp)",
|
||||
74: "Mgal/h (imp)",
|
||||
75: "Mgal/d (imp)",
|
||||
76: "bbl/s (imp;beer)",
|
||||
77: "bbl/min (imp;beer)",
|
||||
78: "bbl/h (imp;beer)",
|
||||
79: "bbl/d (imp;beer)",
|
||||
80: "bbl/s (imp;oil)",
|
||||
81: "bbl/min (imp;oil)",
|
||||
82: "bbl/h (imp;oil)",
|
||||
83: "bbl/d (imp;oil)",
|
||||
88: "kgal/s (us)",
|
||||
89: "kgal/min (us)",
|
||||
90: "kgal/h (us)",
|
||||
91: "kgal/d (us)",
|
||||
92: "MMft cubed/s",
|
||||
93: "MMft cubed/min",
|
||||
94: "MMft cubed/h",
|
||||
96: "Mft cubed/d"
|
||||
}
|
||||
|
||||
drive_fault_a_codes = {
|
||||
0: "Overload Trip",
|
||||
1: "Underload Trip",
|
||||
2: "Inverter Overload Trip",
|
||||
3: "E-Thermal Trip",
|
||||
4: "Ground Fault Trip",
|
||||
5: "Output Image Trip",
|
||||
6: "Inmput Imaging Trip",
|
||||
7: "Reserved",
|
||||
8: "Reserved",
|
||||
9: "NTC Trip",
|
||||
10: "Overcurrent Trip",
|
||||
11: "Overvoltage Trip",
|
||||
12: "External Trip",
|
||||
13: "Arm Short",
|
||||
14: "Over Heat Trip",
|
||||
15: "Fuse Open Trip"
|
||||
}
|
||||
drive_fault_b_codes = {
|
||||
0: "Reserved",
|
||||
1: "Reserved",
|
||||
2: "Reserved",
|
||||
3: "FAN Trip",
|
||||
4: "Reserved",
|
||||
5: "Reserved",
|
||||
6: "Pre PID Fail",
|
||||
7: "Bad contact at basic I/O board",
|
||||
8: "External Brake Trip",
|
||||
9: "No Motor Trip",
|
||||
10: "Bad Option Card",
|
||||
11: "Reserved",
|
||||
12: "Reserved",
|
||||
13: "Reserved",
|
||||
14: "Pre Over Heat Trip",
|
||||
15: "Reserved"
|
||||
}
|
||||
|
||||
|
||||
piflow = {
|
||||
"totalizer_1_units": totalizer_codes.get(value, "Invalid Code"),
|
||||
"totalizer_2_units": totalizer_codes.get(value, "Invalid Code"),
|
||||
"totalizer_3_units": totalizer_codes.get(value, "Invalid Code"),
|
||||
"flow_unit": flow_codes.get(value, "Invalid Code"),
|
||||
"fault_a": drive_fault_a_codes.get(value, "Invalid Code"),
|
||||
"fault_b": drive_fault_b_codes.get(value, "Invalid Code"),
|
||||
"run_status": decode_status(value)
|
||||
}
|
||||
|
||||
return piflow.get(measure, "Invalid Tag")
|
||||
Reference in New Issue
Block a user