added rr_pipeline
11
Pub_Sub/abbflow_rtu/thingsboard/measures.csv
Normal file
@@ -0,0 +1,11 @@
MeasuringPointName,ControllerName,GroupName,UploadType,DataType,Address,Decimal,Len,ReadWrite,Unit,Description,Transform Type,MaxValue,MinValue,MaxScale,MinScale,Gain,Offset,startBit,endBit
battery_voltage,abbflow,default,periodic,FLOAT,44002,2,1,ro,,,none,,,,,,,,
volume_flow,abbflow,default,periodic,FLOAT,44004,2,1,ro,,,none,,,,,,,,
today_volume,abbflow,default,periodic,FLOAT,44006,2,1,ro,,,none,,,,,,,,
yesterday_volume,abbflow,default,periodic,FLOAT,44008,2,1,ro,,,none,,,,,,,,
accumulated_volume,abbflow,default,periodic,FLOAT,44010,2,1,ro,,,none,,,,,,,,
test,abbflow,default,periodic,FLOAT,44012,2,1,ro,,,none,,,,,,,,
differential_pressure,abbflow,default,periodic,FLOAT,44014,2,1,ro,,,none,,,,,,,,
static_pressure,abbflow,default,periodic,FLOAT,44016,2,1,ro,,,none,,,,,,,,
temperature,abbflow,default,periodic,FLOAT,44018,2,1,ro,,,none,,,,,,,,
charger_voltage,abbflow,default,periodic,FLOAT,44020,2,1,ro,,,none,,,,,,,,
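The measure map above ties each ThingsBoard measuring point on the abbflow RTU to a Modbus holding register: FLOAT values at addresses 44002 through 44020, two registers apart (each 32-bit float spans two 16-bit registers), all read-only and uploaded periodically. A minimal sketch of loading such a file, assuming only Python's standard csv module; the helper name and the selected columns are illustrative, not part of the gateway runtime:

import csv

# Hypothetical loader for a measure map like the one above; only the csv
# standard library is assumed. Empty columns (Unit, Description, ...) read as "".
def load_measures(path):
    with open(path, newline="") as f:
        return [
            {
                "name": row["MeasuringPointName"],
                "controller": row["ControllerName"],
                "type": row["DataType"],          # e.g. FLOAT
                "address": int(row["Address"]),   # e.g. 44002
                "read_write": row["ReadWrite"],   # ro / rw
            }
            for row in csv.DictReader(f)
        ]

# measures = load_measures("Pub_Sub/abbflow_rtu/thingsboard/measures.csv")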
31
Pub_Sub/rr_pipeline/thingsboard/pub/sendAlarm.py
Normal file
@@ -0,0 +1,31 @@
# Enter your python code.
import json, time
from datetime import datetime as dt
from common.Logger import logger
from quickfaas.global_dict import get as get_params
from quickfaas.remotebus import publish


def convertDStoJSON(ds):
    # Flatten the [{"key": ..., "value": ...}] parameter list into a plain dict.
    j = dict()
    for x in ds:
        j[x["key"]] = x["value"]
    return j


def controlName(name):
    # Map a local controller name to its cloud name via the "name_map" parameter;
    # fall back to the original name if the map is missing or malformed.
    try:
        params = convertDStoJSON(get_params())
        nameMap = json.loads(params.get("name_map"))
        return nameMap.get(name, name)
    except Exception as e:
        logger.error(e)
        return name


def sendAlarm(message, wizard_api, cloudName):
    logger.debug("publish topic:%s, payload: %s, cloudName: %s" % (__topic__, message, cloudName))
    now = round(time.time() * 1000)
    payload = {}
    for key, value in message["values"].items():
        ctrlName = controlName(value["ctrlName"])
        # Note: one entry per controller; a later alarm for the same controller
        # replaces the earlier entry in this payload.
        payload[ctrlName] = [{"ts": now, "values": {value["measureName"]: value["value"]}}]
    logger.info(json.dumps(payload, indent=4))
    publish(__topic__, json.dumps(payload), __qos__, cloud_name=cloudName)
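To see the transformation sendAlarm performs, here is a self-contained sketch of its payload construction, with controlName reduced to an identity mapping since name_map lives in the runtime parameters; the alarm message shape matches alarmExample3-1-10.json at the end of this commit:

import json, time

# Sample alarm message in the shape sendAlarm receives.
message = {
    "values": {
        "comms_fail_to_remote_1_alm": {
            "ctrlName": "pipeline_main",
            "measureName": "comms_fail_to_remote_1_alm",
            "value": 1,
        }
    }
}
now = round(time.time() * 1000)
payload = {
    v["ctrlName"]: [{"ts": now, "values": {v["measureName"]: v["value"]}}]
    for v in message["values"].values()
}
print(json.dumps(payload))
# -> {"pipeline_main": [{"ts": ..., "values": {"comms_fail_to_remote_1_alm": 1}}]}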
113
Pub_Sub/rr_pipeline/thingsboard/pub/sendData.py
Normal file
@@ -0,0 +1,113 @@
# Enter your python code.
import json, time
from datetime import datetime as dt
from common.Logger import logger
from quickfaas.remotebus import publish
from quickfaas.global_dict import get as get_params


def convertDStoJSON(ds):
    # Flatten the [{"key": ..., "value": ...}] parameter list into a plain dict.
    j = dict()
    for x in ds:
        j[x["key"]] = x["value"]
    return j


def chunk_payload(payload, chunk_size=20, is_attributes_payload=False):
    if is_attributes_payload:
        # For attributes payloads, chunk the controllers
        controllers = list(payload.items())
        for i in range(0, len(controllers), chunk_size):
            yield dict(controllers[i:i + chunk_size])
    else:
        # For data payloads, chunk the values within each controller
        for controller, data in payload.items():
            for entry in data:
                ts = entry['ts']
                values = entry['values']
                chunked_values = list(values.items())
                for i in range(0, len(chunked_values), chunk_size):
                    yield {
                        controller: [{
                            "ts": ts,
                            "values": dict(chunked_values[i:i + chunk_size])
                        }]
                    }


def controlName(name):
    # Map a local controller name to its cloud name via the "name_map" parameter.
    try:
        params = convertDStoJSON(get_params())
        nameMap = json.loads(params.get("name_map"))
        return nameMap.get(name, name)
    except Exception as e:
        logger.error(e)
        return name


def sendData(message):
    #logger.debug(message)
    # Extract measures and group by ctrlName
    grouped_data = {}
    grouped_attributes = {}
    valves = {}
    # Timestamp rounded to the nearest 10 minutes (600 s), in milliseconds
    now = (round(dt.timestamp(dt.now()) / 600) * 600) * 1000
    for measure in message['measures']:
        ctrlName = controlName(measure['ctrlName'])
        name = measure['name']
        value = measure['value']
        health = measure['health']
        # Add controller for telemetry if it doesn't exist
        if ctrlName not in grouped_data:
            grouped_data[ctrlName] = {}
        # Add controller for attributes if it doesn't exist
        if ctrlName not in grouped_attributes:
            grouped_attributes[ctrlName] = {}
        grouped_attributes[ctrlName]["latestReportTime"] = now
        # Add data to temp payload if datapoint health is good
        if health:
            if any(x in name for x in ["open", "closed"]):
                # Collect valve feedback bits: valve_1_open / valve_1_closed
                # both map to the valve key "valve_1" as [closed, open]
                valve = "_".join(name.split("_")[:-1])
                if valve not in valves:
                    valves[valve] = [None, None]

                if "open" in name:
                    valves[valve][1] = value
                elif "closed" in name:
                    valves[valve][0] = value
                else:
                    logger.error("unexpected valve measure name: %s" % name)
            grouped_data[ctrlName][name] = value

    if valves:
        for key, value in valves.items():
            # 0 = In Progress | 1 = Open | 2 = Closed | 3 = Error | 4 = Missing report
            if value[0] == 0 and value[1] == 0:    # Not closed and not open, thus in progress
                output = 0
            elif value[0] == 0 and value[1] == 1:  # Not closed but open, thus open
                output = 1
            elif value[0] == 1 and value[1] == 0:  # Closed but not open, thus closed
                output = 2
            elif value[0] == 1 and value[1] == 1:  # Closed and open at once is not possible, thus error
                output = 3
            else:
                output = 4                         # One of the two bits didn't report
            # ctrlName here is whatever controller the loop above saw last,
            # so this assumes all valves belong to a single controller.
            grouped_data[ctrlName][key + "_status"] = output
    # Transform the grouped data to the desired structure
    payload = {}

    for key, value in grouped_data.items():
        if value:
            payload[key] = [{"ts": now, "values": value}]
    attributes_payload = {}
    for key, value in grouped_attributes.items():
        if value:
            attributes_payload[key] = value

    #logger.debug(payload)
    for chunk in chunk_payload(payload=payload):
        publish(__topic__, json.dumps(chunk), __qos__)
        time.sleep(2)

    for chunk in chunk_payload(payload=attributes_payload, is_attributes_payload=True):
        # Publish the chunk itself, not the full attributes payload
        publish("v1/gateway/attributes", json.dumps(chunk), __qos__)
        time.sleep(2)
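chunk_payload exists to keep each MQTT publish below chunk_size keys: telemetry payloads are split inside each controller's values dict while the [{"ts": ..., "values": {...}}] envelope is preserved, and attributes payloads are split across controllers. A quick standalone check of the slicing behaviour, with made-up sample data and the slice step re-implemented for illustration:

# Made-up sample: one controller reporting 45 measures at a single timestamp.
payload = {"pipeline_main": [{"ts": 0, "values": {f"m{i}": i for i in range(45)}}]}

# The slice step from chunk_payload, re-implemented standalone.
def chunks(values, size=20):
    items = list(values.items())
    for i in range(0, len(items), size):
        yield dict(items[i:i + size])

for entry in payload["pipeline_main"]:
    sizes = [len(c) for c in chunks(entry["values"])]
print(sizes)  # [20, 20, 5] -- three publishes instead of one oversized message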
16
Pub_Sub/rr_pipeline/thingsboard/rr_pipeline_main.csv
Normal file
@@ -0,0 +1,16 @@
MeasuringPointName,ControllerName,GroupName,UploadType,DataType,EnableBit,BitIndex,reverseBit,Address,Decimal,Len,ReadWrite,Unit,Description,Transform Type,MaxValue,MinValue,MaxScale,MinScale,Gain,Offset,startBit,endBit,Pt,Ct,Mapping_table,TransDecimal,bitMap,msecSample,DataEndianReverse,ReadOffset,ReadLength,DataParseMethod,BitId,storageLwTSDB
comms_fail_to_remote_1_alm,pipeline_main,default,periodic,BIT,,,0,AL0_Comm_Fail_With_Slave,,,ro,,,none,,,,,,,,,,,,,0,,,,,,,1
contract_hour,pipeline_main,default,periodic,INT,0,,,Contract_Hour,,,ro,,,none,,,,,,,,,,,,,,,,,,,,1
fm_1_flow_rate,pipeline_main,default,periodic,FLOAT,,,,Val_FM_Flow_Rate,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1
fm_1_last_month_total,pipeline_main,default,periodic,FLOAT,,,,Val_FM_LastMonth_Total,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1
fm_1_lifetime_total,pipeline_main,default,periodic,FLOAT,,,,Val_FM_Lifetime_Total,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1
fm_1_monthly_total,pipeline_main,default,periodic,FLOAT,,,,Val_FM_Monthly_Total,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1
fm_1_today_total,pipeline_main,default,periodic,FLOAT,,,,Val_FM_Todays_Total,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1
fm_1_yesterday_total,pipeline_main,default,periodic,FLOAT,,,,Val_FM_Yest_Total,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1
pipeline_1_pressure,pipeline_main,default,periodic,FLOAT,,,,Val_Pipeline_1_PSI,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1
valve_1_closed,pipeline_main,default,periodic,BIT,,,0,FBK_Valve_1_Closed,,,ro,,,none,,,,,,,,,,,,,0,,,,,,,1
valve_1_open,pipeline_main,default,periodic,BIT,,,0,FBK_Valve_1_Open,,,ro,,,none,,,,,,,,,,,,,0,,,,,,,1
valve_1_toggle_cmd,pipeline_main,default,periodic,BIT,,,0,CMD_Valve_1_Open,,,rw,,,none,,,,,,,,,,,,,0,,,,,,,1
valve_2_closed,pipeline_main,default,periodic,BIT,,,0,FBK_Valve_2_Closed,,,ro,,,none,,,,,,,,,,,,,0,,,,,,,1
valve_2_open,pipeline_main,default,periodic,BIT,,,0,FBK_Valve_2_Open,,,ro,,,none,,,,,,,,,,,,,0,,,,,,,1
valve_2_toggle_cmd,pipeline_main,default,periodic,BIT,,,0,CMD_Valve_2_Open,,,rw,,,none,,,,,,,,,,,,,0,,,,,,,1
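rr_pipeline_main.csv deliberately pairs each valve's FBK_Valve_n_Closed and FBK_Valve_n_Open feedback bits; sendData.py above and the sync functions below collapse each pair into a single *_status point. The same decision table as a small standalone function, using the [closed, open] ordering the scripts use; written here only to make the truth table explicit:

def valve_status(closed, open_):
    # [closed, open] feedback bits -> status code used by sendData/sync
    table = {
        (0, 0): 0,  # neither bit set: valve is travelling (in progress)
        (0, 1): 1,  # open
        (1, 0): 2,  # closed
        (1, 1): 3,  # both set at once: sensor fault
    }
    return table.get((closed, open_), 4)  # 4: one of the bits never reported

assert valve_status(1, 0) == 2
assert valve_status(None, 1) == 4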
5
Pub_Sub/rr_pipeline/thingsboard/rr_pipeline_slave.csv
Normal file
@@ -0,0 +1,5 @@
MeasuringPointName,ControllerName,GroupName,UploadType,DataType,EnableBit,BitIndex,reverseBit,Address,Decimal,Len,ReadWrite,Unit,Description,Transform Type,MaxValue,MinValue,MaxScale,MinScale,Gain,Offset,startBit,endBit,Pt,Ct,Mapping_table,TransDecimal,bitMap,msecSample,DataEndianReverse,ReadOffset,ReadLength,DataParseMethod,BitId,storageLwTSDB
pipeline_1_pressure,pipeline_remote_1,default,periodic,FLOAT,,,,Val_Pipeline_1_PSI,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1
valve_1_closed,pipeline_remote_1,default,periodic,BIT,,,0,FBK_Valve_1_Closed,,,ro,,,none,,,,,,,,,,,,,0,,,,,,,1
valve_1_open,pipeline_remote_1,default,periodic,BIT,,,0,FBK_Valve_1_Open,,,ro,,,none,,,,,,,,,,,,,0,,,,,,,1
valve_1_toggle_cmd,pipeline_remote_1,default,periodic,BIT,,,0,CMD_Valve_1_Open,,,rw,,,none,,,,,,,,,,,,,0,,,,,,,1
204
Pub_Sub/rr_pipeline/thingsboard/sub/receiveAttributeGateway.py
Normal file
@@ -0,0 +1,204 @@
import json, time
from datetime import datetime as dt
from quickfaas.measure import recall, write
from quickfaas.remotebus import publish
from common.Logger import logger
from quickfaas.global_dict import get as get_params


def convertDStoJSON(ds):
    # Flatten the [{"key": ..., "value": ...}] parameter list into a plain dict.
    j = dict()
    for x in ds:
        j[x["key"]] = x["value"]
    return j


def invertJSON(payload):
    # Swap keys and values: {"local": "cloud"} -> {"cloud": "local"}
    newJSON = {}
    for key, value in payload.items():
        newJSON[value] = key
    return newJSON


def convertName(name):
    # Map a cloud device name back to its local controller name via "name_map".
    try:
        params = convertDStoJSON(get_params())
        nameMap = invertJSON(json.loads(params.get("name_map")))
        return nameMap.get(name, name)
    except Exception as e:
        logger.error(e)
        return name


def chunk_payload(payload, chunk_size=20):
    if "values" in payload:
        # Original format: {"ts": ..., "values": {...}}
        chunked_values = list(payload["values"].items())
        for i in range(0, len(chunked_values), chunk_size):
            yield {
                "ts": payload["ts"],
                "values": dict(chunked_values[i:i + chunk_size])
            }
    else:
        # New format: {"key1": "value1", "key2": "value2"}
        chunked_keys = list(payload.keys())
        for i in range(0, len(chunked_keys), chunk_size):
            yield {k: payload[k] for k in chunked_keys[i:i + chunk_size]}


def chunk_payload_gateway(payload, chunk_size=20, is_attributes_payload=False):
    if is_attributes_payload:
        # For attributes payloads, chunk the controllers
        controllers = list(payload.items())
        for i in range(0, len(controllers), chunk_size):
            yield dict(controllers[i:i + chunk_size])
    else:
        # For data payloads, chunk the values within each controller
        for controller, data in payload.items():
            for entry in data:
                ts = entry['ts']
                values = entry['values']
                chunked_values = list(values.items())
                for i in range(0, len(chunked_values), chunk_size):
                    yield {
                        controller: [{
                            "ts": ts,
                            "values": dict(chunked_values[i:i + chunk_size])
                        }]
                    }


def controlName(name):
    # Map a local controller name to its cloud name via "name_map".
    try:
        params = convertDStoJSON(get_params())
        nameMap = json.loads(params.get("name_map"))
        return nameMap.get(name, name)
    except Exception as e:
        logger.error(e)
        return name


# Filter payloads based on device_filter
def filter_payload(payload, device_filter):
    if not device_filter:  # If the filter is empty, include all devices
        return payload
    return {key: value for key, value in payload.items() if key in device_filter}


def sync(device_filter=[]):
    # Get the latest values and send them
    now = round(dt.timestamp(dt.now())) * 1000
    topic = "v1/gateway/telemetry"
    data = []  # default so a failed recall() doesn't leave data undefined below
    try:
        data = recall()  # json.loads(recall().decode("utf-8"))
    except Exception as e:
        logger.error(e)
    logger.debug(data)
    logger.info("SYNCING")
    valves = {}
    grouped_data = {}
    grouped_attributes = {}
    try:
        for controller in data:
            for measure in controller["measures"]:
                # Group by the controller's name, not the measure's
                ctrlName = controlName(controller["name"])
                value = measure['value']
                health = measure['health']
                name = measure['name']
                # Add controller for telemetry if it doesn't exist
                if ctrlName not in grouped_data:
                    grouped_data[ctrlName] = {}
                # Add controller for attributes if it doesn't exist
                if ctrlName not in grouped_attributes:
                    grouped_attributes[ctrlName] = {}
                grouped_attributes[ctrlName]["latestReportTime"] = now
                # Add data to temp payload if datapoint health is good
                if health:
                    if any(x in name for x in ["open", "closed"]):
                        valve = "_".join(name.split("_")[:-1])
                        if valve not in valves:
                            valves[valve] = [None, None]

                        if "open" in name:
                            valves[valve][1] = value
                        elif "closed" in name:
                            valves[valve][0] = value
                        else:
                            logger.error("unexpected valve measure name: %s" % name)
                    grouped_data[ctrlName][name] = value
    except Exception as e:
        logger.error(e)
    try:
        # Transform the grouped data to the desired structure
        payload_gateway = {}
        if valves:
            for key, value in valves.items():
                # 0 = In Progress | 1 = Open | 2 = Closed | 3 = Error | 4 = Missing report
                if value[0] == 0 and value[1] == 0:    # Not closed and not open, thus in progress
                    output = 0
                elif value[0] == 0 and value[1] == 1:  # Not closed but open, thus open
                    output = 1
                elif value[0] == 1 and value[1] == 0:  # Closed but not open, thus closed
                    output = 2
                elif value[0] == 1 and value[1] == 1:  # Closed and open at once is not possible, thus error
                    output = 3
                else:
                    output = 4                         # One of the two bits didn't report
                # ctrlName is whatever controller the grouping loop saw last,
                # so this assumes all valves belong to a single controller.
                grouped_data[ctrlName][key + "_status"] = output
        for key, value in grouped_data.items():
            if value:
                payload_gateway[key] = [{"ts": now, "values": value}]

        attributes_payload_gateway = {}
        for key, value in grouped_attributes.items():
            if value:
                attributes_payload_gateway[key] = value

        # Apply the filter
        filtered_payload_gateway = filter_payload(payload_gateway, device_filter)
        filtered_attributes_payload_gateway = filter_payload(attributes_payload_gateway, device_filter)

        # Send gateway devices data
        for chunk in chunk_payload_gateway(payload=filtered_payload_gateway):
            publish("v1/gateway/telemetry", json.dumps(chunk), qos=1, cloud_name="default")
            time.sleep(2)

        for chunk in chunk_payload_gateway(payload=filtered_attributes_payload_gateway, is_attributes_payload=True):
            publish("v1/gateway/attributes", json.dumps(chunk), qos=1, cloud_name="default")
            time.sleep(2)

    except Exception as e:
        logger.error(e)


def writeplctag(value, controller):
    # value in the form {"measurement": <measurement_name>, "value": <value to write>}
    try:
        logger.info(f"Writing to {controller} with params {value}")
        # payload format: [{"name": "advvfdipp", "measures": [{"name": "manualfrequencysetpoint", "value": 49}]}]
        message = [{"name": controller, "measures": [{"name": value["measurement"], "value": value["value"]}]}]
        resp = write(message)
        logger.debug("RETURN FROM WRITE: {}".format(resp))
        return True
    except Exception as e:
        logger.error(e)
        return False


def receiveAttribute(topic, payload):
    try:
        logger.debug(topic)
        logger.debug(json.loads(payload))
        p = json.loads(payload)
        device = convertName(p["device"])
        for key, value in p["data"].items():
            try:
                result = writeplctag({"measurement": key, "value": value}, device)
                logger.debug(result)
            except Exception as e:
                logger.error(e)
        #logger.debug(command)
        time.sleep(5)
        try:
            # Pass a list so filter_payload matches the device name exactly
            sync([device])
        except Exception as e:
            logger.error(f"Could not sync: {e}")
    except Exception as e:
        logger.debug(e)
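receiveAttribute handles shared-attribute updates pushed from ThingsBoard: each key/value in the update is written to the PLC through writeplctag, and then only that device is re-synced. A hedged sketch of the payload shape it expects; the device and tag names are illustrative:

import json

# Hypothetical shared-attributes update as handled by receiveAttribute above.
payload = json.dumps({
    "device": "pipeline_main",
    "data": {"valve_1_toggle_cmd": 1},
})
# receiveAttribute("v1/gateway/attributes", payload) would call
#   writeplctag({"measurement": "valve_1_toggle_cmd", "value": 1}, "pipeline_main")
# and then re-sync just that device with sync(["pipeline_main"]).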
216
Pub_Sub/rr_pipeline/thingsboard/sub/receiveCommandGateway.py
Normal file
@@ -0,0 +1,216 @@
import json, time
from datetime import datetime as dt
from quickfaas.measure import recall, write
from quickfaas.remotebus import publish
from common.Logger import logger
from quickfaas.global_dict import get as get_params


def convertDStoJSON(ds):
    # Flatten the [{"key": ..., "value": ...}] parameter list into a plain dict.
    j = dict()
    for x in ds:
        j[x["key"]] = x["value"]
    return j


def invertJSON(payload):
    # Swap keys and values: {"local": "cloud"} -> {"cloud": "local"}
    newJSON = {}
    for key, value in payload.items():
        newJSON[value] = key
    return newJSON


def convertName(name):
    # Map a cloud device name back to its local controller name via "name_map".
    try:
        params = convertDStoJSON(get_params())
        nameMap = invertJSON(json.loads(params.get("name_map")))
        return nameMap.get(name, name)
    except Exception as e:
        logger.error(f"Error in convertName: {e}")
        return name


def chunk_payload(payload, chunk_size=20):
    if "values" in payload:
        # Original format: {"ts": ..., "values": {...}}
        chunked_values = list(payload["values"].items())
        for i in range(0, len(chunked_values), chunk_size):
            yield {
                "ts": payload["ts"],
                "values": dict(chunked_values[i:i + chunk_size])
            }
    else:
        # New format: {"key1": "value1", "key2": "value2"}
        chunked_keys = list(payload.keys())
        for i in range(0, len(chunked_keys), chunk_size):
            yield {k: payload[k] for k in chunked_keys[i:i + chunk_size]}


def chunk_payload_gateway(payload, chunk_size=20, is_attributes_payload=False):
    if is_attributes_payload:
        # For attributes payloads, chunk the controllers
        controllers = list(payload.items())
        for i in range(0, len(controllers), chunk_size):
            yield dict(controllers[i:i + chunk_size])
    else:
        # For data payloads, chunk the values within each controller
        for controller, data in payload.items():
            for entry in data:
                ts = entry['ts']
                values = entry['values']
                chunked_values = list(values.items())
                for i in range(0, len(chunked_values), chunk_size):
                    yield {
                        controller: [{
                            "ts": ts,
                            "values": dict(chunked_values[i:i + chunk_size])
                        }]
                    }


def controlName(name):
    # Map a local controller name to its cloud name via "name_map".
    try:
        params = convertDStoJSON(get_params())
        nameMap = json.loads(params.get("name_map"))
        return nameMap.get(name, name)
    except Exception as e:
        logger.error(f"Error in controlName: {e}")
        return name


# Filter payloads based on device_filter
def filter_payload(payload, device_filter):
    if not device_filter:  # If the filter is empty, include all devices
        return payload
    return {key: value for key, value in payload.items() if key in device_filter}


def sync(device_filter=[]):
    # Get the latest values and send them
    now = round(dt.timestamp(dt.now())) * 1000
    topic = "v1/gateway/telemetry"
    data = []  # default so a failed recall() doesn't leave data undefined below
    try:
        data = recall()  # json.loads(recall().decode("utf-8"))
    except Exception as e:
        logger.error(f"Error in trying to get data in sync: {e}")
    logger.debug(data)
    logger.info("SYNCING")
    valves = {}
    grouped_data = {}
    grouped_attributes = {}
    try:
        for controller in data:
            for measure in controller["measures"]:
                # Group by the controller's name, not the measure's
                ctrlName = controlName(controller["name"])
                value = measure['value']
                health = measure['health']
                name = measure['name']
                # Add controller for telemetry if it doesn't exist
                if ctrlName not in grouped_data:
                    grouped_data[ctrlName] = {}
                # Add controller for attributes if it doesn't exist
                if ctrlName not in grouped_attributes:
                    grouped_attributes[ctrlName] = {}
                grouped_attributes[ctrlName]["latestReportTime"] = now
                # Add data to temp payload if datapoint health is good
                if health:
                    if any(x in name for x in ["open", "closed"]):
                        valve = "_".join(name.split("_")[:-1])
                        if valve not in valves:
                            valves[valve] = [None, None]

                        if "open" in name:
                            valves[valve][1] = value
                        elif "closed" in name:
                            valves[valve][0] = value
                        else:
                            logger.error("unexpected valve measure name: %s" % name)
                    grouped_data[ctrlName][name] = value
    except Exception as e:
        logger.error(f"Error in sync trying to group data: {e}")
    try:
        # Transform the grouped data to the desired structure
        payload_gateway = {}
        if valves:
            for key, value in valves.items():
                # 0 = In Progress | 1 = Open | 2 = Closed | 3 = Error | 4 = Missing report
                if value[0] == 0 and value[1] == 0:    # Not closed and not open, thus in progress
                    output = 0
                elif value[0] == 0 and value[1] == 1:  # Not closed but open, thus open
                    output = 1
                elif value[0] == 1 and value[1] == 0:  # Closed but not open, thus closed
                    output = 2
                elif value[0] == 1 and value[1] == 1:  # Closed and open at once is not possible, thus error
                    output = 3
                else:
                    output = 4                         # One of the two bits didn't report
                # ctrlName is whatever controller the grouping loop saw last,
                # so this assumes all valves belong to a single controller.
                grouped_data[ctrlName][key + "_status"] = output
        for key, value in grouped_data.items():
            if value:
                payload_gateway[key] = [{"ts": now, "values": value}]

        attributes_payload_gateway = {}
        for key, value in grouped_attributes.items():
            if value:
                attributes_payload_gateway[key] = value

        # Apply the filter
        filtered_payload_gateway = filter_payload(payload_gateway, device_filter)
        filtered_attributes_payload_gateway = filter_payload(attributes_payload_gateway, device_filter)

        # Send gateway devices data
        for chunk in chunk_payload_gateway(payload=filtered_payload_gateway):
            publish("v1/gateway/telemetry", json.dumps(chunk), qos=1, cloud_name="default")
            time.sleep(2)

        for chunk in chunk_payload_gateway(payload=filtered_attributes_payload_gateway, is_attributes_payload=True):
            publish("v1/gateway/attributes", json.dumps(chunk), qos=1, cloud_name="default")
            time.sleep(2)

    except Exception as e:
        logger.error(f"Error in sync sending data: {e}")


def writeplctag(value, controller):
    # value in the form {"measurement": <measurement_name>, "value": <value to write>}
    try:
        logger.info(f"Writing to {controller} with params {value}")
        # payload format: [{"name": "advvfdipp", "measures": [{"name": "manualfrequencysetpoint", "value": 49}]}]
        message = [{"name": controller, "measures": [{"name": value["measurement"], "value": value["value"]}]}]
        resp = write(message)
        logger.debug("RETURN FROM WRITE: {}".format(resp))
        return True
    except Exception as e:
        logger.error(f"Error in writeplctag: {e}")
        return False


def receiveCommand(topic, payload):
    try:
        logger.debug(topic)
        logger.info(json.loads(payload))
        p = json.loads(payload)
        #logger.info(p)
        command = p["data"]["method"]
        device = convertName(p["device"])
        commands = {
            "sync": sync,
            "writeplctag": writeplctag,
        }
        if command == "setPLCTag":
            try:
                #logger.info(params)
                result = commands["writeplctag"](p["data"]["params"], device)
                logger.debug(result)
            except Exception as e:
                logger.error(f"Error in receiveCommand setPLCTag: {e}")
            # Note: the ack always reports success, even if the write failed
            ackPayload = {"device": p["device"], "id": p["data"]["id"], "data": {"success": True}}
            ack(ackPayload)
            time.sleep(5)
            try:
                # Pass a list so filter_payload matches the device name exactly
                sync([device])
            except Exception as e:
                logger.error(f"Could not sync: {e}")
    except Exception as e:
        logger.error(f"Error in receiveCommand: {e}")


def ack(message):
    publish("v1/gateway/rpc", json.dumps(message), 1, cloud_name="default")
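receiveCommand implements the gateway RPC flow: only the setPLCTag method is dispatched, an ack goes back on v1/gateway/rpc, and the device is re-synced afterwards. A sketch of a matching RPC request and the ack it produces; the id and tag names are illustrative:

import json

# Hypothetical gateway RPC request as dispatched by receiveCommand above.
request = {
    "device": "pipeline_main",
    "data": {
        "id": 42,
        "method": "setPLCTag",
        "params": {"measurement": "valve_1_toggle_cmd", "value": 1},
    },
}
# receiveCommand would run writeplctag(request["data"]["params"], <local name>)
# and then publish this ack on v1/gateway/rpc:
ack_payload = {
    "device": request["device"],
    "id": request["data"]["id"],
    "data": {"success": True},
}
print(json.dumps(ack_payload))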
19
code snippets/alarmExample3-1-10.json
Normal file
@@ -0,0 +1,19 @@
{
    "timestamp": 1735570981,
    "group_name": "default",
    "timestampMsec": 1735570981164,
    "values": {
        "ww_6_lo_discharge_alm": {
            "ctrlName": "pipeline_main",
            "measureName": "comms_to_remote_1_alm",
            "timestamp": 1735570981,
            "timestampMsec": 1735570981036,
            "current": "on",
            "status": 0,
            "value": 1,
            "alarm_value": 1,
            "alarm_content": "Failure",
            "level": 5
        }
    }
}
File diff suppressed because it is too large