updated pub_sub with chunking code
BIN  Pub_Sub/.DS_Store (vendored) - Binary file not shown.
BIN  Pub_Sub/abbflow/.DS_Store (vendored) - Binary file not shown.
@@ -1,5 +1,5 @@
 # Enter your python code.
-import json, os
+import json, os, time
 from datetime import datetime as dt
 from datetime import timedelta as td
 from common.Logger import logger
@@ -183,6 +183,15 @@ def totalizeMonth(lifetime):
         reset = True
     return (value,reset)

+# Helper function to split the payload into chunks
+def chunk_payload(payload, chunk_size=20):
+    chunked_values = list(payload["values"].items())
+    for i in range(0, len(chunked_values), chunk_size):
+        yield {
+            "ts": payload["ts"],
+            "values": dict(chunked_values[i:i+chunk_size])
+        }
+
 def sendData(message):
     logger.debug(message)
     try:
@@ -209,7 +218,10 @@ def sendData(message):
             del payload["values"]["today_volume"]
         if payload["values"]["month_volume"] < 0:
             del payload["values"]["month_volume"]
-    publish(__topic__, json.dumps(payload), __qos__)
+
+    for chunk in chunk_payload(payload=payload):
+        publish(__topic__, json.dumps(chunk), __qos__)
+        time.sleep(2)
     publish("v1/devices/me/attributes", json.dumps({"latestReportTime": (round(dt.timestamp(dt.now())/600)*600)*1000}), __qos__)

     if dayReset:
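Note: the new chunk_payload helper caps each telemetry publish at 20 values per MQTT message, presumably to stay under the platform's payload-size limit, and the time.sleep(2) spaces the chunked publishes out. A minimal standalone sketch of the behavior (nothing here depends on the gateway runtime; publish, __topic__, and __qos__ exist only inside the device-supervisor sandbox):

    payload = {"ts": 1700000000000, "values": {"tag_%d" % i: i for i in range(45)}}

    def chunk_payload(payload, chunk_size=20):
        chunked_values = list(payload["values"].items())
        for i in range(0, len(chunked_values), chunk_size):
            yield {"ts": payload["ts"], "values": dict(chunked_values[i:i+chunk_size])}

    # 45 values split into messages of 20, 20, and 5, all sharing one timestamp.
    print([len(c["values"]) for c in chunk_payload(payload)])  # [20, 20, 5]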
@@ -1,29 +1,42 @@
-# Enter your python code.
-import json
-from quickfaas.measure import recall
+import json, time
+from datetime import datetime as dt
+from quickfaas.measure import recall, write
+from quickfaas.remotebus import publish
 from common.Logger import logger

-def sync(wizard_api):
+# Helper function to split the payload into chunks
+def chunk_payload(payload, chunk_size=20):
+    chunked_values = list(payload["values"].items())
+    for i in range(0, len(chunked_values), chunk_size):
+        yield {
+            "ts": payload["ts"],
+            "values": dict(chunked_values[i:i+chunk_size])
+        }
+
+def sync():
     #get new values and send
-    payload = {}
+    payload = {"ts": round(dt.timestamp(dt.now()))*1000, "values": {}}
+    topic = "v1/devices/me/telemetry"
     try:
         data = recall()#json.loads(recall().decode("utf-8"))
     except Exception as e:
         logger.error(e)
-    logger.info(data)
+    logger.debug(data)
     for controller in data:
         for measure in controller["measures"]:
             #publish measure
-            topic = "v1/devices/me/telemetry"
-            payload[measure["name"]] = measure["value"]
-            logger.debug("Sending on topic: {}".format(topic))
-            logger.debug("Sending value: {}".format(payload))
-            wizard_api.mqtt_publish(topic, json.dumps(payload))
+            payload["values"][measure["name"]] = measure["value"]
+    logger.debug("Sending on topic: {}".format(topic))
+    logger.debug("Sending value: {}".format(payload))
+    for chunk in chunk_payload(payload=payload):
+        publish(topic, json.dumps(chunk), 1)
+        time.sleep(2)

 def writeplctag(value, wizard_api):
     try:
         #value = json.loads(value.replace("'",'"'))
         logger.debug(value)
-        message = {"advvfdipp":{value["measurement"]: value["value"]}}
+        message = {"abbflow":{value["measurement"]: value["value"]}}
         resp = wizard_api.write_plc_values(message)
         #logger.debug("RETURN FROM WRITE: {}".format(resp))
         return True
@@ -44,7 +57,7 @@ def receiveCommand(topic, payload, wizard_api):
     if command == "setPLCTag":
         result = commands["writeplctag"](p["params"],wizard_api)
         if result:
-            sync(wizard_api)
+            sync()
     #commands[command](p["mac"].lower(),p["payload"]["value"], wizard_api)
     #logger.debug(command)
     ack(topic.split("/")[-1], wizard_api)
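Note: the sub script now builds the ThingsBoard telemetry envelope {"ts": <epoch ms>, "values": {...}} instead of a flat dict, and publishes through quickfaas.remotebus.publish rather than wizard_api.mqtt_publish. The timestamp arithmetic, worked through:

    from datetime import datetime as dt

    # dt.timestamp() returns seconds as a float; round() snaps to whole seconds
    # and *1000 yields the millisecond epoch that ThingsBoard expects for "ts".
    ts_ms = round(dt.timestamp(dt.now())) * 1000
    print({"ts": ts_ms, "values": {"volume_flow": 12.3}})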
BIN  Pub_Sub/abbflow/v1/.DS_Store (vendored) - Binary file not shown.
@@ -1,382 +0,0 @@
{
  "controllers": [
    {
      "protocol": "Modbus-RTU",
      "name": "ABBFlow",
      "args": {"slaveAddr": 1, "int16Ord": "ab", "int32Ord": "abcd", "float32Ord": "abcd", "continuousAcquisition": 1, "maxContinuousNumber": 64},
      "endpoint": "rs232",
      "samplePeriod": 10,
      "expired": 10000
    }
  ],
  "groups": [
    {"name": "default", "uploadInterval": 3600, "reference": 10}
  ],
  "measures": [
    {"ctrlName": "ABBFlow", "dataType": "FLOAT", "readWrite": "ro", "uploadType": "periodic", "group": "default", "decimal": 2, "name": "volume_flow", "desc": "", "unit": "", "transformType": 0, "gain": "1.0", "offset": "0.0", "regAddr": "44003", "maxValue": "", "minValue": "", "maxScaleValue": "", "minScaleValue": "", "addr": "44003"},
    {"ctrlName": "ABBFlow", "dataType": "FLOAT", "readWrite": "ro", "uploadType": "periodic", "group": "default", "decimal": 2, "name": "today_volume", "desc": "", "unit": "", "transformType": 0, "gain": "1.0", "offset": "0.0", "regAddr": "44005", "maxValue": "", "minValue": "", "maxScaleValue": "", "minScaleValue": "", "addr": "44005"},
    {"ctrlName": "ABBFlow", "dataType": "FLOAT", "readWrite": "ro", "uploadType": "periodic", "group": "default", "decimal": 2, "name": "battery_voltage", "desc": "", "unit": "", "transformType": 0, "gain": "1.0", "offset": "0.0", "regAddr": "44001", "maxValue": "", "minValue": "", "maxScaleValue": "", "minScaleValue": "", "addr": "44001"},
    {"ctrlName": "ABBFlow", "dataType": "FLOAT", "readWrite": "ro", "uploadType": "periodic", "group": "default", "decimal": 2, "name": "differential_pressure", "desc": "", "unit": "", "transformType": 0, "gain": "1.0", "offset": "0.0", "regAddr": "44013", "maxValue": "", "minValue": "", "maxScaleValue": "", "minScaleValue": "", "addr": "44013"},
    {"ctrlName": "ABBFlow", "dataType": "FLOAT", "readWrite": "ro", "uploadType": "periodic", "group": "default", "decimal": 2, "name": "yesterday_volume", "desc": "", "unit": "", "transformType": 0, "gain": "1.0", "offset": "0.0", "regAddr": "44007", "maxValue": "", "minValue": "", "maxScaleValue": "", "minScaleValue": "", "addr": "44007"},
    {"ctrlName": "ABBFlow", "dataType": "FLOAT", "readWrite": "ro", "uploadType": "periodic", "group": "default", "decimal": 2, "name": "accumulated_volume", "desc": "", "unit": "", "transformType": 0, "gain": "1.0", "offset": "0.0", "regAddr": "44009", "maxValue": "", "minValue": "", "maxScaleValue": "", "minScaleValue": "", "addr": "44009"},
    {"ctrlName": "ABBFlow", "dataType": "FLOAT", "readWrite": "ro", "uploadType": "periodic", "group": "default", "decimal": 2, "name": "static_pressure", "desc": "", "unit": "", "transformType": 0, "gain": "1.0", "offset": "0.0", "regAddr": "44015", "maxValue": "", "minValue": "", "maxScaleValue": "", "minScaleValue": "", "addr": "44015"},
    {"ctrlName": "ABBFlow", "dataType": "FLOAT", "readWrite": "ro", "uploadType": "periodic", "group": "default", "decimal": 2, "name": "temperature", "desc": "", "unit": "", "transformType": 0, "gain": "1.0", "offset": "0.0", "regAddr": "44017", "maxValue": "", "minValue": "", "maxScaleValue": "", "minScaleValue": "", "addr": "44017"},
    {"ctrlName": "ABBFlow", "dataType": "FLOAT", "readWrite": "ro", "uploadType": "periodic", "group": "default", "decimal": 2, "name": "charger_voltage", "desc": "", "unit": "", "transformType": 0, "gain": "1.0", "offset": "0.0", "regAddr": "44019", "maxValue": "", "minValue": "", "maxScaleValue": "", "minScaleValue": "", "addr": "44019"},
    {"ctrlName": "ABBFlow", "dataType": "FLOAT", "readWrite": "ro", "uploadType": "periodic", "group": "default", "decimal": 2, "name": "last_calculation_period_volume", "desc": "", "unit": "", "transformType": 0, "gain": "1.0", "offset": "0.0", "regAddr": "44012", "maxValue": "", "minValue": "", "maxScaleValue": "", "minScaleValue": "", "addr": "44012"}
  ],
  "alarms": [],
  "misc": {
    "maxAlarmRecordSz": 2000,
    "logLvl": "DEBUG",
    "coms": [
      {"name": "rs232", "baud": 9600, "bits": 8, "stopbits": 1, "parityChk": "n"},
      {"name": "rs485", "baud": 9600, "bits": 8, "stopbits": 1, "parityChk": "n"}
    ]
  },
  "clouds": [
    {
      "cacheSize": 100,
      "enable": 1,
      "name": "default",
      "type": "Standard MQTT",
      "args": {"host": "mq194.imistaway.net", "port": 1883, "clientId": "abbflow-inhand-test", "auth": 1, "tls": 0, "cleanSession": 1, "mqttVersion": "v3.1.1", "keepalive": 60, "key": "", "cert": "", "rootCA": "", "verifyServer": 0, "verifyClient": 0, "username": "admin", "passwd": "columbus", "authType": 1}
    }
  ],
  "labels": [
    {"key": "SN", "value": "GF5022137006262"},
    {"key": "MAC", "value": "00:18:05:1a:e5:57"},
    {"key": "MAC_UPPER", "value": "00:18:05:1A:E5:57"},
    {"key": "MAC_LOWER", "value": "00:18:05:1a:e5:57"}
  ],
  "quickfaas": {
    "genericFuncs": [],
    "uploadFuncs": [
      {
        "name": "Mistaway",
        "trigger": "measure_event",
        "topic": "meshify/db/194/_/abbflow/${MAC_LOWER}",
        "qos": 1,
        "groups": ["default"],
        "funcName": "sendData",
        "script": "# Enter your python code.\nimport json\nfrom common.Logger import logger\nfrom quickfaas.remotebus import publish\nimport re, uuid\n\n\ndef sendData(message):\n    logger.debug(message)\n    mac = ':'.join(re.findall('..', '%012x' % uuid.getnode()))\n    for measure in message[\"measures\"]:\n        publish(__topic__ + mac + \":01:99/\" + measure[\"name\"], json.dumps([{\"value\": measure[\"value\"]}]), __qos__)\n        #publish(__topic__, json.dumps({measure[\"name\"]: measure[\"value\"]}), __qos__)\n",
        "msgType": 0,
        "cloudName": "default"
      }
    ],
    "downloadFuncs": [
      {
        "name": "Commands",
        "topic": "meshify/sets/194/${MAC_UPPER}:01:99",
        "qos": 1,
        "funcName": "receiveCommand",
        "payload_type": "Plaintext",
        "script": "# Enter your python code.\nimport json\nfrom quickfaas.measure import recall\nfrom common.Logger import logger\n\ndef sync(mac, wizard_api):\n    #get new values and send\n    try:\n        data = recall()#json.loads(recall().decode(\"utf-8\"))\n    except Exception as e:\n        logger.error(e)\n    logger.info(data)\n    for controller in data:\n        for measure in controller[\"measures\"]:\n            #publish measure\n            topic = \"meshify/db/194/_/abbflow/\" + mac + \"/\" + measure[\"name\"]\n            payload = [{\"value\": measure[\"value\"]}]\n            logger.debug(\"Sending on topic: {}\".format(topic))\n            logger.debug(\"Sending value: {}\".format(payload))\n            wizard_api.mqtt_publish(topic, json.dumps(payload))\n\ndef receiveCommand(topic, payload, wizard_api):\n    logger.debug(topic)\n    logger.debug(json.loads(payload))\n    p = json.loads(payload)[0]\n    command = p[\"payload\"][\"name\"].split(\".\")[1]\n    commands = {\n        \"sync\": sync\n    }\n    commands[command](p[\"mac\"].lower(), wizard_api)\n    #logger.debug(command)\n    ack(p[\"msgId\"], p[\"mac\"], p[\"payload\"][\"name\"].split(\".\")[1], p[\"payload\"][\"value\"], wizard_api)\n\ndef ack(msgid, mac, name, value, wizard_api):\n    #logger.debug(mac)\n    mac = \"\".join(mac.split(\":\")[:-2])\n    #logger.debug(msgid)\n    #logger.debug(mac)\n    #logger.debug(name)\n    #logger.debug(value)\n    wizard_api.mqtt_publish(\"meshify/responses/\" + str(msgid), json.dumps([{\"value\": \"{} Success Setting: {} To: {}\".format(mac,name, value), \"msgid\": str(msgid)}]))",
        "msgType": 0,
        "cloudName": "default",
        "trigger": "command_event"
      }
    ]
  },
  "modbusSlave": {"enable": 0, "protocol": "Modbus-TCP", "port": 502, "slaveAddr": 1, "int16Ord": "ab", "int32Ord": "abcd", "float32Ord": "abcd", "maxConnection": 5, "mapping_table": []},
  "iec104Server": {"enable": 0, "cotSize": 2, "port": 2404, "serverList": [{"asduAddr": 1}], "kValue": 12, "wValue": 8, "t0": 15, "t1": 15, "t2": 10, "t3": 20, "maximumLink": 5, "timeSet": 1, "byteOrder": "abcd", "mapping_table": []},
  "opcuaServer": {"enable": 0, "port": 4840, "maximumLink": 5, "securityMode": 0, "identifierType": "String", "mapping_table": []},
  "bindConfig": {"enable": 0, "bind": {"modelId": "", "modelName": "", "srcId": "", "srcName": "", "devId": "", "devName": ""}, "varGroups": [], "variables": [], "alerts": []},
  "southMetadata": {},
  "bindMetadata": {"version": "", "timestamp": ""}
}
BIN  Pub_Sub/abbflow/v1/pub/.DS_Store (vendored) - Binary file not shown.
@@ -1,14 +0,0 @@
# Enter your python code.
import json
from common.Logger import logger
from quickfaas.remotebus import publish
import re, uuid


def sendData(message):
    logger.debug(message)
    mac = ':'.join(re.findall('..', '%012x' % uuid.getnode()))
    for measure in message["measures"]:
        publish(__topic__ + mac + ":01:99/" + measure["name"], json.dumps([{"value": measure["value"]}]), __qos__)
        #publish(__topic__, json.dumps({measure["name"]: measure["value"]}), __qos__)
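Note: the deleted v1 publisher derived the device MAC from the gateway's own network interface. How that one-liner unpacks, step by step:

    import re, uuid

    node = uuid.getnode()                     # 48-bit hardware address as an int
    hex12 = '%012x' % node                    # zero-padded 12-digit hex, e.g. '0018051ae557'
    mac = ':'.join(re.findall('..', hex12))   # pair the digits: '00:18:05:1a:e5:57'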
@@ -1,41 +0,0 @@
# Enter your python code.
import json
from quickfaas.measure import recall
from common.Logger import logger

def sync(mac, wizard_api):
    #get new values and send
    try:
        data = recall()#json.loads(recall().decode("utf-8"))
    except Exception as e:
        logger.error(e)
    logger.info(data)
    for controller in data:
        for measure in controller["measures"]:
            #publish measure
            topic = "meshify/db/194/_/abbflow/" + mac + "/" + measure["name"]
            payload = [{"value": measure["value"]}]
            logger.debug("Sending on topic: {}".format(topic))
            logger.debug("Sending value: {}".format(payload))
            wizard_api.mqtt_publish(topic, json.dumps(payload))

def receiveCommand(topic, payload, wizard_api):
    logger.debug(topic)
    logger.debug(json.loads(payload))
    p = json.loads(payload)[0]
    command = p["payload"]["name"].split(".")[1]
    commands = {
        "sync": sync
    }
    commands[command](p["mac"].lower(), wizard_api)
    #logger.debug(command)
    ack(p["msgId"], p["mac"], p["payload"]["name"].split(".")[1], p["payload"]["value"], wizard_api)

def ack(msgid, mac, name, value, wizard_api):
    #logger.debug(mac)
    mac = "".join(mac.split(":")[:-2])
    #logger.debug(msgid)
    #logger.debug(mac)
    #logger.debug(name)
    #logger.debug(value)
    wizard_api.mqtt_publish("meshify/responses/" + str(msgid), json.dumps([{"value": "{} Success Setting: {} To: {}".format(mac,name, value), "msgid": str(msgid)}]))
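Note: inferred from how receiveCommand indexes the payload, the meshify command messages it handles look roughly like the following (field values are illustrative, not taken from a real message):

    sample = [{
        "msgId": "12345",
        "mac": "00:18:05:1A:E5:57:01:99",
        "payload": {"name": "abbflow.sync", "value": 1},
    }]
    # receiveCommand takes element [0], splits "abbflow.sync" on "." to get "sync",
    # dispatches to sync(mac.lower(), wizard_api), and acks on meshify/responses/<msgId>.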
BIN  Pub_Sub/abbflow/v2/.DS_Store (vendored) - Binary file not shown.
@@ -1,382 +0,0 @@
{
  "controllers": [
    {
      "protocol": "Modbus-RTU",
      "name": "ABBFlow",
      "args": {"slaveAddr": 1, "int16Ord": "ab", "int32Ord": "abcd", "float32Ord": "abcd", "continuousAcquisition": 1, "maxContinuousNumber": 64},
      "endpoint": "rs232",
      "samplePeriod": 10,
      "expired": 10000
    }
  ],
  "groups": [
    {"name": "default", "uploadInterval": 3600, "reference": 10}
  ],
  "measures": [
    {"ctrlName": "ABBFlow", "dataType": "FLOAT", "readWrite": "ro", "uploadType": "periodic", "group": "default", "decimal": 2, "name": "volume_flow", "desc": "", "unit": "", "transformType": 0, "gain": "1.0", "offset": "0.0", "regAddr": "44003", "maxValue": "", "minValue": "", "maxScaleValue": "", "minScaleValue": "", "addr": "44003"},
    {"ctrlName": "ABBFlow", "dataType": "FLOAT", "readWrite": "ro", "uploadType": "periodic", "group": "default", "decimal": 2, "name": "today_volume", "desc": "", "unit": "", "transformType": 0, "gain": "1.0", "offset": "0.0", "regAddr": "44005", "maxValue": "", "minValue": "", "maxScaleValue": "", "minScaleValue": "", "addr": "44005"},
    {"ctrlName": "ABBFlow", "dataType": "FLOAT", "readWrite": "ro", "uploadType": "periodic", "group": "default", "decimal": 2, "name": "battery_voltage", "desc": "", "unit": "", "transformType": 0, "gain": "1.0", "offset": "0.0", "regAddr": "44001", "maxValue": "", "minValue": "", "maxScaleValue": "", "minScaleValue": "", "addr": "44001"},
    {"ctrlName": "ABBFlow", "dataType": "FLOAT", "readWrite": "ro", "uploadType": "periodic", "group": "default", "decimal": 2, "name": "differential_pressure", "desc": "", "unit": "", "transformType": 0, "gain": "1.0", "offset": "0.0", "regAddr": "44013", "maxValue": "", "minValue": "", "maxScaleValue": "", "minScaleValue": "", "addr": "44013"},
    {"ctrlName": "ABBFlow", "dataType": "FLOAT", "readWrite": "ro", "uploadType": "periodic", "group": "default", "decimal": 2, "name": "yesterday_volume", "desc": "", "unit": "", "transformType": 0, "gain": "1.0", "offset": "0.0", "regAddr": "44007", "maxValue": "", "minValue": "", "maxScaleValue": "", "minScaleValue": "", "addr": "44007"},
    {"ctrlName": "ABBFlow", "dataType": "FLOAT", "readWrite": "ro", "uploadType": "periodic", "group": "default", "decimal": 2, "name": "accumulated_volume", "desc": "", "unit": "", "transformType": 0, "gain": "1.0", "offset": "0.0", "regAddr": "44009", "maxValue": "", "minValue": "", "maxScaleValue": "", "minScaleValue": "", "addr": "44009"},
    {"ctrlName": "ABBFlow", "dataType": "FLOAT", "readWrite": "ro", "uploadType": "periodic", "group": "default", "decimal": 2, "name": "static_pressure", "desc": "", "unit": "", "transformType": 0, "gain": "1.0", "offset": "0.0", "regAddr": "44015", "maxValue": "", "minValue": "", "maxScaleValue": "", "minScaleValue": "", "addr": "44015"},
    {"ctrlName": "ABBFlow", "dataType": "FLOAT", "readWrite": "ro", "uploadType": "periodic", "group": "default", "decimal": 2, "name": "temperature", "desc": "", "unit": "", "transformType": 0, "gain": "1.0", "offset": "0.0", "regAddr": "44017", "maxValue": "", "minValue": "", "maxScaleValue": "", "minScaleValue": "", "addr": "44017"},
    {"ctrlName": "ABBFlow", "dataType": "FLOAT", "readWrite": "ro", "uploadType": "periodic", "group": "default", "decimal": 2, "name": "charger_voltage", "desc": "", "unit": "", "transformType": 0, "gain": "1.0", "offset": "0.0", "regAddr": "44019", "maxValue": "", "minValue": "", "maxScaleValue": "", "minScaleValue": "", "addr": "44019"},
    {"ctrlName": "ABBFlow", "dataType": "FLOAT", "readWrite": "ro", "uploadType": "periodic", "group": "default", "decimal": 2, "name": "last_calculation_period_volume", "desc": "", "unit": "", "transformType": 0, "gain": "1.0", "offset": "0.0", "regAddr": "44012", "maxValue": "", "minValue": "", "maxScaleValue": "", "minScaleValue": "", "addr": "44012"}
  ],
  "alarms": [],
  "misc": {
    "maxAlarmRecordSz": 2000,
    "logLvl": "DEBUG",
    "coms": [
      {"name": "rs232", "baud": 9600, "bits": 8, "stopbits": 1, "parityChk": "n"},
      {"name": "rs485", "baud": 9600, "bits": 8, "stopbits": 1, "parityChk": "n"}
    ]
  },
  "clouds": [
    {
      "cacheSize": 100,
      "enable": 1,
      "name": "default",
      "type": "Standard MQTT",
      "args": {"host": "mq194.imistaway.net", "port": 1883, "clientId": "abbflow-inhand-test", "auth": 1, "tls": 0, "cleanSession": 1, "mqttVersion": "v3.1.1", "keepalive": 60, "key": "", "cert": "", "rootCA": "", "verifyServer": 0, "verifyClient": 0, "username": "admin", "passwd": "columbus", "authType": 1}
    }
  ],
  "labels": [
    {"key": "SN", "value": "GF5022137006262"},
    {"key": "MAC", "value": "00:18:05:1a:e5:57"},
    {"key": "MAC_UPPER", "value": "00:18:05:1A:E5:57"},
    {"key": "MAC_LOWER", "value": "00:18:05:1a:e5:57"}
  ],
  "quickfaas": {
    "genericFuncs": [],
    "uploadFuncs": [
      {
        "name": "Mistaway",
        "trigger": "measure_event",
        "topic": "meshify/db/194/_/abbflow/${MAC_LOWER}",
        "qos": 1,
        "groups": ["default"],
        "funcName": "sendData",
        "script": "# Enter your python code.\nimport json\nfrom common.Logger import logger\nfrom quickfaas.remotebus import publish\nimport re, uuid\nfrom paho.mqtt import client\n\nlwtData = {\n    \"init\": False,\n    \"client\": client.Client(client_id=str(uuid.uuid4()), clean_session=True, userdata=None, protocol=client.MQTTv311, transport=\"tcp\")\n}\ndef lwt(mac):\n    try:\n        #if not lwtData[\"connected\"]:\n        if not lwtData[\"init\"]:\n            logger.info(\"INITIALIZING LWT CLIENT\")\n            lwtData[\"client\"].username_pw_set(username=\"admin\", password=\"columbus\")\n            lwtData[\"client\"].will_set(\"meshify/db/194/_/mainHP/\" + mac + \":00:00/connected\", json.dumps({\"value\": False}))\n            lwtData[\"init\"] = True\n            logger.info(\"Connecting to MQTT Broker for LWT purposes!!!!!!!\")\n            lwtData[\"client\"].connect(\"mq194.imistaway.net\", 1883, 600)\n        #publish(\"meshify/db/194/_/mainHP/00:18:05:1a:e5:37:00:00/connected\", json.dumps({\"value\":True}), __qos__)\n        lwtData[\"client\"].publish(\"meshify/db/194/_/mainHP/\" + mac + \":00:00/connected\", json.dumps({\"value\": True}))\n    except Exception as e:\n        logger.error(\"LWT DID NOT DO THE THING\")\n        logger.error(e)\n\ndef sendData(message):\n    logger.debug(message)\n    mac = __topic__.split(\"/\")[-1] #':'.join(re.findall('..', '%012x' % uuid.getnode()))\n    lwt(mac)\n    for measure in message[\"measures\"]:\n        publish(__topic__ + \":01:99/\" + measure[\"name\"], json.dumps([{\"value\": measure[\"value\"]}]), __qos__)\n        #publish(__topic__, json.dumps({measure[\"name\"]: measure[\"value\"]}), __qos__)\n",
        "msgType": 0,
        "cloudName": "default"
      }
    ],
    "downloadFuncs": [
      {
        "name": "Commands",
        "topic": "meshify/sets/194/${MAC_UPPER}:01:99",
        "qos": 1,
        "funcName": "receiveCommand",
        "payload_type": "Plaintext",
        "script": "# Enter your python code.\nimport json\nfrom quickfaas.measure import recall\nfrom common.Logger import logger\n\ndef sync(mac, wizard_api):\n    #get new values and send\n    try:\n        data = recall()#json.loads(recall().decode(\"utf-8\"))\n    except Exception as e:\n        logger.error(e)\n    logger.info(data)\n    for controller in data:\n        for measure in controller[\"measures\"]:\n            #publish measure\n            topic = \"meshify/db/194/_/abbflow/\" + mac + \"/\" + measure[\"name\"]\n            payload = [{\"value\": measure[\"value\"]}]\n            logger.debug(\"Sending on topic: {}\".format(topic))\n            logger.debug(\"Sending value: {}\".format(payload))\n            wizard_api.mqtt_publish(topic, json.dumps(payload))\n\ndef receiveCommand(topic, payload, wizard_api):\n    logger.debug(topic)\n    logger.debug(json.loads(payload))\n    p = json.loads(payload)[0]\n    command = p[\"payload\"][\"name\"].split(\".\")[1]\n    commands = {\n        \"sync\": sync\n    }\n    commands[command](p[\"mac\"].lower(), wizard_api)\n    #logger.debug(command)\n    ack(p[\"msgId\"], p[\"mac\"], p[\"payload\"][\"name\"].split(\".\")[1], p[\"payload\"][\"value\"], wizard_api)\n\ndef ack(msgid, mac, name, value, wizard_api):\n    #logger.debug(mac)\n    mac = \"\".join(mac.split(\":\")[:-2])\n    #logger.debug(msgid)\n    #logger.debug(mac)\n    #logger.debug(name)\n    #logger.debug(value)\n    wizard_api.mqtt_publish(\"meshify/responses/\" + str(msgid), json.dumps([{\"value\": \"{} Success Setting: {} To: {}\".format(mac,name, value), \"msgid\": str(msgid)}]))",
        "msgType": 0,
        "cloudName": "default",
        "trigger": "command_event"
      }
    ]
  },
  "modbusSlave": {"enable": 0, "protocol": "Modbus-TCP", "port": 502, "slaveAddr": 1, "int16Ord": "ab", "int32Ord": "abcd", "float32Ord": "abcd", "maxConnection": 5, "mapping_table": []},
  "iec104Server": {"enable": 0, "cotSize": 2, "port": 2404, "serverList": [{"asduAddr": 1}], "kValue": 12, "wValue": 8, "t0": 15, "t1": 15, "t2": 10, "t3": 20, "maximumLink": 5, "timeSet": 1, "byteOrder": "abcd", "mapping_table": []},
  "opcuaServer": {"enable": 0, "port": 4840, "maximumLink": 5, "securityMode": 0, "identifierType": "String", "mapping_table": []},
  "bindConfig": {"enable": 0, "bind": {"modelId": "", "modelName": "", "srcId": "", "srcName": "", "devId": "", "devName": ""}, "varGroups": [], "variables": [], "alerts": []},
  "southMetadata": {},
  "bindMetadata": {"version": "", "timestamp": ""}
}
@@ -1,35 +0,0 @@
# Enter your python code.
import json
from common.Logger import logger
from quickfaas.remotebus import publish
import re, uuid
from paho.mqtt import client

lwtData = {
    "init": False,
    "client": client.Client(client_id=str(uuid.uuid4()), clean_session=True, userdata=None, protocol=client.MQTTv311, transport="tcp")
}
def lwt(mac):
    try:
        #if not lwtData["connected"]:
        if not lwtData["init"]:
            logger.info("INITIALIZING LWT CLIENT")
            lwtData["client"].username_pw_set(username="admin", password="columbus")
            lwtData["client"].will_set("meshify/db/194/_/mainHP/" + mac + ":00:00/connected", json.dumps({"value": False}))
            lwtData["init"] = True
            logger.info("Connecting to MQTT Broker for LWT purposes!!!!!!!")
            lwtData["client"].connect("mq194.imistaway.net", 1883, 600)
        #publish("meshify/db/194/_/mainHP/00:18:05:1a:e5:37:00:00/connected", json.dumps({"value":True}), __qos__)
        lwtData["client"].publish("meshify/db/194/_/mainHP/" + mac + ":00:00/connected", json.dumps({"value": True}))
    except Exception as e:
        logger.error("LWT DID NOT DO THE THING")
        logger.error(e)

def sendData(message):
    logger.debug(message)
    mac = __topic__.split("/")[-1] #':'.join(re.findall('..', '%012x' % uuid.getnode()))
    lwt(mac)
    for measure in message["measures"]:
        publish(__topic__ + ":01:99/" + measure["name"], json.dumps([{"value": measure["value"]}]), __qos__)
        #publish(__topic__, json.dumps({measure["name"]: measure["value"]}), __qos__)
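Note: the deleted v2 publisher registered an MQTT Last Will so the broker itself flips the device's connected flag to false if the gateway drops off uncleanly; while alive, the script asserts true on the same topic. A minimal sketch of the pattern against the paho-mqtt 1.x API this script used (host and topic are the hard-coded ones above; credentials omitted):

    import json, uuid
    from paho.mqtt import client

    mac = "00:18:05:1a:e5:57"
    topic = "meshify/db/194/_/mainHP/" + mac + ":00:00/connected"

    c = client.Client(client_id=str(uuid.uuid4()), protocol=client.MQTTv311)
    c.will_set(topic, json.dumps({"value": False}))   # broker publishes this on unclean disconnect
    c.connect("mq194.imistaway.net", 1883, 600)
    c.publish(topic, json.dumps({"value": True}))     # normal "I'm alive" state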
@@ -1,41 +0,0 @@
# Enter your python code.
import json
from quickfaas.measure import recall
from common.Logger import logger

def sync(mac, wizard_api):
    #get new values and send
    try:
        data = recall()#json.loads(recall().decode("utf-8"))
    except Exception as e:
        logger.error(e)
    logger.info(data)
    for controller in data:
        for measure in controller["measures"]:
            #publish measure
            topic = "meshify/db/194/_/abbflow/" + mac + "/" + measure["name"]
            payload = [{"value": measure["value"]}]
            logger.debug("Sending on topic: {}".format(topic))
            logger.debug("Sending value: {}".format(payload))
            wizard_api.mqtt_publish(topic, json.dumps(payload))

def receiveCommand(topic, payload, wizard_api):
    logger.debug(topic)
    logger.debug(json.loads(payload))
    p = json.loads(payload)[0]
    command = p["payload"]["name"].split(".")[1]
    commands = {
        "sync": sync
    }
    commands[command](p["mac"].lower(), wizard_api)
    #logger.debug(command)
    ack(p["msgId"], p["mac"], p["payload"]["name"].split(".")[1], p["payload"]["value"], wizard_api)

def ack(msgid, mac, name, value, wizard_api):
    #logger.debug(mac)
    mac = "".join(mac.split(":")[:-2])
    #logger.debug(msgid)
    #logger.debug(mac)
    #logger.debug(name)
    #logger.debug(value)
    wizard_api.mqtt_publish("meshify/responses/" + str(msgid), json.dumps([{"value": "{} Success Setting: {} To: {}".format(mac,name, value), "msgid": str(msgid)}]))
@@ -1,5 +1,5 @@
 # Enter your python code.
-import json, os
+import json, os, time
 from datetime import datetime as dt
 from common.Logger import logger
 from quickfaas.remotebus import publish
@@ -117,6 +117,15 @@ def checkParameterConfig(cfg):

     return cfg

+# Helper function to split the payload into chunks
+def chunk_payload(payload, chunk_size=20):
+    chunked_values = list(payload["values"].items())
+    for i in range(0, len(chunked_values), chunk_size):
+        yield {
+            "ts": payload["ts"],
+            "values": dict(chunked_values[i:i+chunk_size])
+        }
+
 def sendData(message):
     #logger.debug(message)
     try:
@@ -140,7 +149,9 @@ def sendData(message):
         except Exception as e:
             logger.error(e)

-    publish(__topic__, json.dumps(payload), __qos__)
+    for chunk in chunk_payload(payload=payload):
+        publish(__topic__, json.dumps(chunk), __qos__)
+        time.sleep(2)
     publish("v1/devices/me/attributes", json.dumps({"latestReportTime": (round(dt.timestamp(dt.now())/600)*600)*1000}), __qos__)

 def convert_int(plc_tag, value):
@@ -1,11 +1,21 @@
 import json, time
+from datetime import datetime as dt
 from quickfaas.measure import recall, write
 from quickfaas.remotebus import publish
 from common.Logger import logger

+# Helper function to split the payload into chunks
+def chunk_payload(payload, chunk_size=20):
+    chunked_values = list(payload["values"].items())
+    for i in range(0, len(chunked_values), chunk_size):
+        yield {
+            "ts": payload["ts"],
+            "values": dict(chunked_values[i:i+chunk_size])
+        }
+
 def sync():
     #get new values and send
-    payload = {}
+    payload = {"ts": round(dt.timestamp(dt.now()))*1000, "values": {}}
     topic = "v1/devices/me/telemetry"
     try:
         data = recall()#json.loads(recall().decode("utf-8"))
@@ -22,7 +32,9 @@ def sync():
             payload[measure["name"]] = measure["value"]
             logger.debug("Sending on topic: {}".format(topic))
             logger.debug("Sending value: {}".format(payload))
-            publish(topic, json.dumps(payload), 1)
+    for chunk in chunk_payload(payload=payload):
+        publish(topic, json.dumps(chunk), 1)
+        time.sleep(2)
 def writeplctag(value):
     #value in the form {"measurement": <measurement_name>, "value": <value to write>}
     try:
@@ -1,5 +1,5 @@
 # Enter your python code.
-import json, os
+import json, os, time
 from datetime import datetime as dt
 from common.Logger import logger
 from quickfaas.remotebus import publish
@@ -117,6 +117,15 @@ def checkParameterConfig(cfg):

     return cfg

+# Helper function to split the payload into chunks
+def chunk_payload(payload, chunk_size=20):
+    chunked_values = list(payload["values"].items())
+    for i in range(0, len(chunked_values), chunk_size):
+        yield {
+            "ts": payload["ts"],
+            "values": dict(chunked_values[i:i+chunk_size])
+        }
+
 def sendData(message):
     #logger.debug(message)
     try:
@@ -140,7 +149,9 @@ def sendData(message):
         except Exception as e:
             logger.error(e)

-    publish(__topic__, json.dumps(payload), __qos__)
+    for chunk in chunk_payload(payload=payload):
+        publish(__topic__, json.dumps(chunk), __qos__)
+        time.sleep(2)
     publish("v1/devices/me/attributes", json.dumps({"latestReportTime": (round(dt.timestamp(dt.now())/600)*600)*1000}), __qos__)

 def convert_int(plc_tag, value):
@@ -1,11 +1,21 @@
 import json, time
+from datetime import datetime as dt
 from quickfaas.measure import recall, write
 from quickfaas.remotebus import publish
 from common.Logger import logger

+# Helper function to split the payload into chunks
+def chunk_payload(payload, chunk_size=20):
+    chunked_values = list(payload["values"].items())
+    for i in range(0, len(chunked_values), chunk_size):
+        yield {
+            "ts": payload["ts"],
+            "values": dict(chunked_values[i:i+chunk_size])
+        }
+
 def sync():
     #get new values and send
-    payload = {}
+    payload = {"ts": round(dt.timestamp(dt.now()))*1000, "values": {}}
     topic = "v1/devices/me/telemetry"
     try:
         data = recall()#json.loads(recall().decode("utf-8"))
@@ -22,7 +32,9 @@ def sync():
             payload[measure["name"]] = measure["value"]
             logger.debug("Sending on topic: {}".format(topic))
             logger.debug("Sending value: {}".format(payload))
-            publish(topic, json.dumps(payload), 1)
+    for chunk in chunk_payload(payload=payload):
+        publish(topic, json.dumps(chunk), 1)
+        time.sleep(2)
 def writeplctag(value):
     #value in the form {"measurement": <measurement_name>, "value": <value to write>}
     try:
201  Pub_Sub/advvfdipp_wo_downhole/thingsboard/v1/pub/sendAlarms.py  (Normal file)
@@ -0,0 +1,201 @@
# Enter your python code.
import json, time
from common.Logger import logger
from quickfaas.remotebus import publish
from quickfaas.measure import recall


def sendAlarm(message):
    logger.info(message)
    payload = {}
    payload["ts"] = time.time()*1000
    payload["values"] = {message["measureName"]: message["value"]}
    publish(__topic__, json.dumps(payload), __qos__)
    sync()

def sync():
    #get new values and send
    payload = {"ts": time.time()*1000, "values": {}}
    try:
        data = recall()#json.loads(recall().decode("utf-8"))
    except Exception as e:
        logger.error(e)
    logger.debug(data)
    for controller in data:
        for measure in controller["measures"]:
            #publish measure
            if measure["name"] in ["wellstatus","pidcontrolmode","downholesensorstatus","alarmflowrate","alarmintakepressure","alarmintaketemperature","alarmtubingpressure","alarmvfd","alarmlockout","alarmfluidlevel","runpermissive","startpermissive","last_vfd_fault_code","vfd_fault", "flowmeter_fault"]:
                payload["values"][measure["name"]] = convert_int(measure["name"], measure["value"])
                payload["values"][measure["name"] + "_int"] = measure["value"]
            else:
                payload["values"][measure["name"]] = measure["value"]
    logger.debug("Sending on topic: {}".format(__topic__))
    logger.debug("Sending value: {}".format(payload))
    publish(__topic__, json.dumps(payload), 1)


def convert_int(plc_tag, value):
    well_status_codes = {0: "Running", 1: "Pumped Off", 2: "Alarmed", 3: "Locked Out", 4: "Stopped"}

    pid_control_codes = {0: "Flow", 1: "Fluid Level", 2: "Tubing Pressure", 3: "Manual"}

    downhole_codes = {0: "OK", 1: "Connecting", 2: "Open Circuit", 3: "Shorted", 4: "Cannot Decode"}

    permissive_codes = {0: "OK", 1: "Flow", 2: "Intake Pressure", 3: "Intake Temperature", 4: "Tubing Pressure", 5: "VFD", 6: "Fluid Level", 7: "Min. Downtime"}

    alarm_codes = {0: "OK", 1: "Alarm"}

    alarm_vfd_codes = {0: "OK", 1: "Locked Out"}

    vfd_fault_codes = {
        0: "No Fault", 2: "Auxiliary Input", 3: "Power Loss", 4: "UnderVoltage",
        5: "OverVoltage", 7: "Motor Overload", 8: "Heatsink OverTemp", 9: "Thermister OverTemp",
        10: "Dynamic Brake OverTemp", 12: "Hardware OverCurrent", 13: "Ground Fault", 14: "Ground Warning",
        15: "Load Loss", 17: "Input Phase Loss", 18: "Motor PTC Trip", 19: "Task Overrun",
        20: "Torque Prove Speed Band", 21: "Output Phase Loss", 24: "Decel Inhibit", 25: "OverSpeed Limit",
        26: "Brake Slipped", 27: "Torque Prove Conflict", 28: "TP Encls Confict", 29: "Analog In Loss",
        33: "Auto Restarts Exhausted", 35: "IPM OverCurrent", 36: "SW OverCurrent", 38: "Phase U to Ground",
        39: "Phase V to Ground", 40: "Phase W to Ground", 41: "Phase UV Short", 42: "Phase VW Short",
        43: "Phase WU Short", 44: "Phase UNeg to Ground", 45: "Phase VNeg to Ground", 46: "Phase WNeg to Ground",
        48: "System Defaulted", 49: "Drive Powerup", 51: "Clear Fault Queue", 55: "Control Board Overtemp",
        59: "Invalid Code", 61: "Shear Pin 1", 62: "Shear Pin 2", 64: "Drive Overload",
        66: "OW Torque Level", 67: "Pump Off", 71: "Port 1 Adapter", 72: "Port 2 Adapter",
        73: "Port 3 Adapter", 74: "Port 4 Adapter", 75: "Port 5 Adapter", 76: "Port 6 Adapter",
        77: "IR Volts Range", 78: "FluxAmps Ref Range", 79: "Excessive Load", 80: "AutoTune Aborted",
        81: "Port 1 DPI Loss", 82: "Port 2 DPI Loss", 83: "Port 3 DPI Loss", 84: "Port 4 DPI Loss",
        85: "Port 5 DPI Loss", 86: "Port 6 DPI Loss", 87: "IXo Voltage Range", 91: "Primary Velocity Feedback Loss",
        93: "Hardware Enable Check", 94: "Alternate Velocity Feedback Loss", 95: "Auxiliary Velocity Feedback Loss", 96: "Position Feedback Loss",
        97: "Auto Tach Switch", 100: "Parameter Checksum", 101: "Power Down NVS Blank", 102: "NVS Not Blank",
        103: "Power Down NVS Incompatible", 104: "Power Board Checksum", 106: "Incompat MCB-PB", 107: "Replaced MCB-PB",
        108: "Analog Calibration Checksum", 110: "Invalid Power Board Data", 111: "Power Board Invalid ID", 112: "Power Board App Min Version",
        113: "Tracking DataError", 115: "Power Down Table Full", 116: "Power Down Entry Too Large", 117: "Power Down Data Checksum",
        118: "Power Board Power Down Checksum", 124: "App ID Changed", 125: "Using Backup App", 134: "Start on Power Up",
        137: "External Precharge Error", 138: "Precharge Open", 141: "Autotune Enc Angle", 142: "Autotune Speed Restricted",
        143: "Autotune Current Regulator", 144: "Autotune Inertia", 145: "Autotune Travel",
        13035: "Net IO Timeout", 13037: "Net IO Timeout"
    }

    plc_tags = {
        "wellstatus": well_status_codes.get(value, "Invalid Code"),
        "pidcontrolmode": pid_control_codes.get(value, "Invalid Code"),
        "downholesensorstatus": downhole_codes.get(value, "Invalid Code"),
        "alarmflowrate": alarm_codes.get(value, "Invalid Code"),
        "alarmintakepressure": alarm_codes.get(value, "Invalid Code"),
        "alarmintaketemperature": alarm_codes.get(value, "Invalid Code"),
        "alarmtubingpressure": alarm_codes.get(value, "Invalid Code"),
        "alarmvfd": alarm_codes.get(value, "Invalid Code"),
        "alarmlockout": alarm_vfd_codes.get(value, "Invalid Code"),
        "alarmfluidlevel": alarm_codes.get(value, "Invalid Code"),
        "runpermissive": permissive_codes.get(value, "Invalid Code"),
        "startpermissive": permissive_codes.get(value, "Invalid Code"),
        "last_vfd_fault_code": vfd_fault_codes.get(value, "Invalid Code"),
        "vfd_fault": vfd_fault_codes.get(value, "Invalid Code"),
        "flowmeter_fault": alarm_codes.get(value, "Invalid Code")
    }

    return plc_tags.get(plc_tag, "Invalid Tag")
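Note: convert_int is a pure lookup; it maps a register's integer code to its display string, with "Invalid Code" for unknown values and "Invalid Tag" for unknown tags. Expected behavior:

    print(convert_int("wellstatus", 2))     # "Alarmed"
    print(convert_int("vfd_fault", 13035))  # "Net IO Timeout"
    print(convert_int("wellstatus", 99))    # "Invalid Code"
    print(convert_int("not_a_tag", 0))      # "Invalid Tag"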
323  Pub_Sub/advvfdipp_wo_downhole/thingsboard/v1/pub/sendData.py  (Normal file)
@@ -0,0 +1,323 @@
|
|||||||
|
# Enter your python code.
|
||||||
|
import json, os, time
|
||||||
|
from datetime import datetime as dt
|
||||||
|
from common.Logger import logger
|
||||||
|
from quickfaas.remotebus import publish
|
||||||
|
from quickfaas.global_dict import get as get_params
|
||||||
|
from quickfaas.global_dict import _set_global_args
|
||||||
|
|
||||||
|
def reboot(reason="Rebooting for config file update"):
|
||||||
|
#basic = Basic()
|
||||||
|
logger.info("!" * 10 + "REBOOTING DEVICE" + "!"*10)
|
||||||
|
logger.info(reason)
|
||||||
|
r = os.popen("kill -s SIGHUP `cat /var/run/python/supervisord.pid`").read()
|
||||||
|
logger.info(f"REBOOT : {r}")
|
||||||
|
|
||||||
|
def checkFileExist(filename):
|
||||||
|
path = "/var/user/files"
|
||||||
|
if not os.path.exists(path):
|
||||||
|
logger.debug("no folder making files folder in var/user")
|
||||||
|
os.makedirs(path)
|
||||||
|
with open(path + "/" + filename, "a") as f:
|
||||||
|
            json.dump({}, f)
    if not os.path.exists(path + "/" + filename):
        logger.debug("no creds file making creds file")
        with open(path + "/" + filename, "a") as f:
            json.dump({}, f)


def convertDStoJSON(ds):
    j = dict()
    for x in ds:
        j[x["key"]] = x["value"]
    return j


def convertJSONtoDS(j):
    d = []
    for key in j.keys():
        d.append({"key": key, "value": j[key]})
    return d


def checkCredentialConfig():
    logger.debug("CHECKING CONFIG")
    cfgpath = "/var/user/cfg/device_supervisor/device_supervisor.cfg"
    credspath = "/var/user/files/creds.json"
    cfg = dict()
    with open(cfgpath, "r") as f:
        cfg = json.load(f)
    clouds = cfg.get("clouds")
    logger.debug(clouds)
    #if not configured then try to configure from stored values
    if clouds[0]["args"]["clientId"] == "unknown" or clouds[0]["args"]["username"] == "unknown" or not clouds[0]["args"]["passwd"] or clouds[0]["args"]["passwd"] == "unknown":
        checkFileExist("creds.json")
        with open(credspath, "r") as c:
            creds = json.load(c)
        if creds:
            logger.debug("updating config with stored data")
            clouds[0]["args"]["clientId"] = creds["clientId"]
            clouds[0]["args"]["username"] = creds["userName"]
            clouds[0]["args"]["passwd"] = creds["password"]
            cfg["clouds"] = clouds
            cfg = checkParameterConfig(cfg)
            with open(cfgpath, "w", encoding='utf-8') as n:
                json.dump(cfg, n, indent=1, ensure_ascii=False)
            reboot()
    else:
        #clouds is filled out: if its data differs from what is stored, assume someone typed in
        #something new and store it; if creds is empty, fill it from clouds' data
        checkFileExist("creds.json")
        with open(credspath, "r") as c:
            logger.debug("updating stored file with new data")
            cfg = checkParameterConfig(cfg)
            with open(cfgpath, "w", encoding='utf-8') as n:
                json.dump(cfg, n, indent=1, ensure_ascii=False)
            creds = json.load(c)
        if creds:
            if creds["clientId"] != clouds[0]["args"]["clientId"]:
                creds["clientId"] = clouds[0]["args"]["clientId"]
            if creds["userName"] != clouds[0]["args"]["username"]:
                creds["userName"] = clouds[0]["args"]["username"]
            if creds["password"] != clouds[0]["args"]["passwd"]:
                creds["password"] = clouds[0]["args"]["passwd"]
        else:
            creds["clientId"] = clouds[0]["args"]["clientId"]
            creds["userName"] = clouds[0]["args"]["username"]
            creds["password"] = clouds[0]["args"]["passwd"]
        with open(credspath, "w") as cw:
            json.dump(creds, cw)


def checkParameterConfig(cfg):
    logger.debug("Checking Parameters!!!!")
    paramspath = "/var/user/files/params.json"
    cfgparams = convertDStoJSON(cfg.get("labels"))
    #check stored values
    checkFileExist("params.json")
    with open(paramspath, "r") as f:
        logger.debug("Opened param storage file")
        params = json.load(f)
        if params:
            if cfgparams != params:
                #go through each param:
                #if not "unknown" and cfg and params aren't the same, take from cfg (likely updated manually)
                #if key is in cfg but not in params, copy it to params
                logger.debug("equalizing params between cfg and stored")
                for key in cfgparams.keys():
                    try:
                        if cfgparams[key] != params[key] and cfgparams[key] != "unknown":
                            params[key] = cfgparams[key]
                    except KeyError:
                        #key present in cfg but not yet stored
                        params[key] = cfgparams[key]
                cfg["labels"] = convertJSONtoDS(params)
                _set_global_args(convertJSONtoDS(params))
                with open(paramspath, "w") as p:
                    json.dump(params, p)
        else:
            with open(paramspath, "w") as p:
                logger.debug("initializing param file with params in memory")
                json.dump(convertDStoJSON(get_params()), p)
            cfg["labels"] = get_params()

    return cfg

# Helper function to split the payload into chunks
def chunk_payload(payload, chunk_size=20):
    chunked_values = list(payload["values"].items())
    for i in range(0, len(chunked_values), chunk_size):
        yield {
            "ts": payload["ts"],
            "values": dict(chunked_values[i:i+chunk_size])
        }
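
# Not part of the commit: a quick illustration of how chunk_payload behaves.
# With 45 values and the default chunk_size of 20, the generator yields three
# messages of 20, 20, and 5 values, each carrying the same "ts":
#
#   demo = {"ts": 1700149800000, "values": {f"tag{i}": i for i in range(45)}}
#   for chunk in chunk_payload(demo):
#       print(len(chunk["values"]))   # 20, 20, 5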

def sendData(message):
    #logger.debug(message)
    try:
        checkCredentialConfig()
    except Exception as e:
        logger.error(e)
    payload = {"ts": (round(dt.timestamp(dt.now())/600)*600)*1000, "values": {}}
    for measure in message["measures"]:
        try:
            logger.debug(measure)
            if abs(payload["ts"]/1000 - measure["timestamp"]) > 3600:
                reboot(reason="Poll timestamp and actual timestamp out of sync. Actual: {} Poll: {}".format(payload["ts"]/1000, measure["timestamp"]))
            if measure["name"] in ["wellstatus","pidcontrolmode","downholesensorstatus","alarmflowrate","alarmintakepressure","alarmintaketemperature","alarmtubingpressure","alarmvfd","alarmlockout","alarmfluidlevel","runpermissive","startpermissive","last_vfd_fault_code","vfd_fault", "flowmeter_fault"]:
                logger.debug("Converting DINT/BOOL to STRING")
                value = convert_int(measure["name"], measure["value"])
                logger.debug("Converted {} to {}".format(measure["value"], value))
                payload["values"][measure["name"]] = value
                payload["values"][measure["name"] + "_int"] = measure["value"]
            else:
                payload["values"][measure["name"]] = measure["value"]
        except Exception as e:
            logger.error(e)

    for chunk in chunk_payload(payload=payload):
        publish(__topic__, json.dumps(chunk), __qos__)
        time.sleep(2)
    publish("v1/devices/me/attributes", json.dumps({"latestReportTime": (round(dt.timestamp(dt.now())/600)*600)*1000}), __qos__)
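
# Not part of the commit: the "ts" expression above snaps the report time to
# the nearest 10-minute boundary, in milliseconds. Worked example:
#   t = 1700138047                  # seconds since epoch
#   t / 600        = 2833563.41...
#   round(t / 600) = 2833563
#   2833563 * 600  = 1700137800     # nearest multiple of 600 s (10 min)
#   * 1000         = 1700137800000  # ThingsBoard timestamps are in ms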

def convert_int(plc_tag, value):
    well_status_codes = {
        0: "Running",
        1: "Pumped Off",
        2: "Alarmed",
        3: "Locked Out",
        4: "Stopped"
    }

    pid_control_codes = {
        0: "Flow",
        1: "Fluid Level",
        2: "Tubing Pressure",
        3: "Manual"
    }

    downhole_codes = {
        0: "OK",
        1: "Connecting",
        2: "Open Circuit",
        3: "Shorted",
        4: "Cannot Decode"
    }

    permissive_codes = {
        0: "OK",
        1: "Flow",
        2: "Intake Pressure",
        3: "Intake Temperature",
        4: "Tubing Pressure",
        5: "VFD",
        6: "Fluid Level",
        7: "Min. Downtime"
    }

    alarm_codes = {
        0: "OK",
        1: "Alarm"
    }

    alarm_vfd_codes = {
        0: "OK",
        1: "Locked Out"
    }

    vfd_fault_codes = {
        0: "No Fault",
        2: "Auxiliary Input",
        3: "Power Loss",
        4: "UnderVoltage",
        5: "OverVoltage",
        7: "Motor Overload",
        8: "Heatsink OverTemp",
        9: "Thermistor OverTemp",
        10: "Dynamic Brake OverTemp",
        12: "Hardware OverCurrent",
        13: "Ground Fault",
        14: "Ground Warning",
        15: "Load Loss",
        17: "Input Phase Loss",
        18: "Motor PTC Trip",
        19: "Task Overrun",
        20: "Torque Prove Speed Band",
        21: "Output Phase Loss",
        24: "Decel Inhibit",
        25: "OverSpeed Limit",
        26: "Brake Slipped",
        27: "Torque Prove Conflict",
        28: "TP Encls Conflict",
        29: "Analog In Loss",
        33: "Auto Restarts Exhausted",
        35: "IPM OverCurrent",
        36: "SW OverCurrent",
        38: "Phase U to Ground",
        39: "Phase V to Ground",
        40: "Phase W to Ground",
        41: "Phase UV Short",
        42: "Phase VW Short",
        43: "Phase WU Short",
        44: "Phase UNeg to Ground",
        45: "Phase VNeg to Ground",
        46: "Phase WNeg to Ground",
        48: "System Defaulted",
        49: "Drive Powerup",
        51: "Clear Fault Queue",
        55: "Control Board Overtemp",
        59: "Invalid Code",
        61: "Shear Pin 1",
        62: "Shear Pin 2",
        64: "Drive Overload",
        66: "OW Torque Level",
        67: "Pump Off",
        71: "Port 1 Adapter",
        72: "Port 2 Adapter",
        73: "Port 3 Adapter",
        74: "Port 4 Adapter",
        75: "Port 5 Adapter",
        76: "Port 6 Adapter",
        77: "IR Volts Range",
        78: "FluxAmps Ref Range",
        79: "Excessive Load",
        80: "AutoTune Aborted",
        81: "Port 1 DPI Loss",
        82: "Port 2 DPI Loss",
        83: "Port 3 DPI Loss",
        84: "Port 4 DPI Loss",
        85: "Port 5 DPI Loss",
        86: "Port 6 DPI Loss",
        87: "IXo Voltage Range",
        91: "Primary Velocity Feedback Loss",
        93: "Hardware Enable Check",
        94: "Alternate Velocity Feedback Loss",
        95: "Auxiliary Velocity Feedback Loss",
        96: "Position Feedback Loss",
        97: "Auto Tach Switch",
        100: "Parameter Checksum",
        101: "Power Down NVS Blank",
        102: "NVS Not Blank",
        103: "Power Down NVS Incompatible",
        104: "Power Board Checksum",
        106: "Incompat MCB-PB",
        107: "Replaced MCB-PB",
        108: "Analog Calibration Checksum",
        110: "Invalid Power Board Data",
        111: "Power Board Invalid ID",
        112: "Power Board App Min Version",
        113: "Tracking DataError",
        115: "Power Down Table Full",
        116: "Power Down Entry Too Large",
        117: "Power Down Data Checksum",
        118: "Power Board Power Down Checksum",
        124: "App ID Changed",
        125: "Using Backup App",
        134: "Start on Power Up",
        137: "External Precharge Error",
        138: "Precharge Open",
        141: "Autotune Enc Angle",
        142: "Autotune Speed Restricted",
        143: "Autotune Current Regulator",
        144: "Autotune Inertia",
        145: "Autotune Travel",
        13035: "Net IO Timeout",
        13037: "Net IO Timeout"
    }

    plc_tags = {
        "wellstatus": well_status_codes.get(value, "Invalid Code"),
        "pidcontrolmode": pid_control_codes.get(value, "Invalid Code"),
        "downholesensorstatus": downhole_codes.get(value, "Invalid Code"),
        "alarmflowrate": alarm_codes.get(value, "Invalid Code"),
        "alarmintakepressure": alarm_codes.get(value, "Invalid Code"),
        "alarmintaketemperature": alarm_codes.get(value, "Invalid Code"),
        "alarmtubingpressure": alarm_codes.get(value, "Invalid Code"),
        "alarmvfd": alarm_codes.get(value, "Invalid Code"),
        "alarmlockout": alarm_vfd_codes.get(value, "Invalid Code"),
        "alarmfluidlevel": alarm_codes.get(value, "Invalid Code"),
        "runpermissive": permissive_codes.get(value, "Invalid Code"),
        "startpermissive": permissive_codes.get(value, "Invalid Code"),
        "last_vfd_fault_code": vfd_fault_codes.get(value, "Invalid Code"),
        "vfd_fault": vfd_fault_codes.get(value, "Invalid Code"),
        "flowmeter_fault": alarm_codes.get(value, "Invalid Code")
    }

    return plc_tags.get(plc_tag, "Invalid Tag")
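
# Not part of the commit: example lookups against the tables above.
#   convert_int("wellstatus", 2)     -> "Alarmed"
#   convert_int("pidcontrolmode", 3) -> "Manual"
#   convert_int("vfd_fault", 13)     -> "Ground Fault"
#   convert_int("wellstatus", 99)    -> "Invalid Code"   (unmapped value)
#   convert_int("not_a_tag", 0)      -> "Invalid Tag"    (unmapped tag)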
@@ -0,0 +1,278 @@
import json, time
from datetime import datetime as dt
from quickfaas.measure import recall, write
from quickfaas.remotebus import publish
from common.Logger import logger


# Helper function to split the payload into chunks
def chunk_payload(payload, chunk_size=20):
    chunked_values = list(payload["values"].items())
    for i in range(0, len(chunked_values), chunk_size):
        yield {
            "ts": payload["ts"],
            "values": dict(chunked_values[i:i+chunk_size])
        }

def sync():
    #get new values and send
    payload = {"ts": round(dt.timestamp(dt.now()))*1000, "values": {}}
    topic = "v1/devices/me/telemetry"
    try:
        data = recall()  #json.loads(recall().decode("utf-8"))
    except Exception as e:
        logger.error(e)
    logger.debug(data)
    for controller in data:
        for measure in controller["measures"]:
            #publish measure; readings must go under payload["values"] so chunk_payload can see them
            if measure["name"] in ["wellstatus","pidcontrolmode","downholesensorstatus","alarmflowrate","alarmintakepressure","alarmintaketemperature","alarmtubingpressure","alarmvfd","alarmlockout","alarmfluidlevel","runpermissive","startpermissive","last_vfd_fault_code","vfd_fault", "flowmeter_fault"]:
                payload["values"][measure["name"]] = convert_int(measure["name"], measure["value"])
                payload["values"][measure["name"] + "_int"] = measure["value"]
            else:
                payload["values"][measure["name"]] = measure["value"]
    logger.debug("Sending on topic: {}".format(topic))
    logger.debug("Sending value: {}".format(payload))
    for chunk in chunk_payload(payload=payload):
        publish(topic, json.dumps(chunk), 1)
        time.sleep(2)

def writeplctag(value):
    #value in the form {"measurement": <measurement_name>, "value": <value to write>}
    try:
        #value = json.loads(value.replace("'",'"'))
        logger.debug(value)
        #payload format: [{"name": "advvfdipp", "measures": [{"name": "manualfrequencysetpoint", "value": 49}]}]
        message = [{"name": "advvfdipp", "measures": [{"name": value["measurement"], "value": value["value"]}]}]
        resp = write(message)
        logger.debug("RETURN FROM WRITE: {}".format(resp))
        return True
    except Exception as e:
        logger.debug(e)
        return False


def receiveCommand(topic, payload):
    try:
        logger.debug(topic)
        logger.debug(json.loads(payload))
        p = json.loads(payload)
        command = p["method"]
        commands = {
            "sync": sync,
            "writeplctag": writeplctag,
        }
        if command == "setPLCTag":
            try:
                result = commands["writeplctag"](p["params"])
                logger.debug(result)
            except Exception as e:
                logger.error(e)
        elif command == "changeSetpoint":
            try:
                logger.debug("attempting controlpoint write")
                params_type = {"measurement": "pidcontrolmode", "value": p["params"]["setpointType"]}
                #note: a setpointType of 0 ("Flow") is falsy, so it does not trigger a mode write here
                if params_type["value"]:
                    commands["writeplctag"](params_type)
                    time.sleep(2)
            except Exception as e:
                logger.error("DID NOT WRITE CONTROL MODE")
                logger.error(e)
            try:
                logger.debug("attempting setpoint write")
                modes = {0: "flowsetpoint", 1: "fluidlevelsetpoint", 2: "tubingpressuresetpoint", 3: "manualfrequencysetpoint"}
                params_value = {"value": p["params"]["setpointValue"]}
                if params_value["value"]:
                    params_value["measurement"] = modes[getMode()]
                    result = commands["writeplctag"](params_value)
                    logger.debug(result)
            except Exception as e:
                logger.error("DID NOT WRITE SETPOINT")
                logger.error(e)
        #logger.debug(command)
        ack(topic.split("/")[-1])
        time.sleep(5)
        sync()
    except Exception as e:
        logger.debug(e)

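# Not part of the commit: example RPC payloads this handler expects (shapes
# inferred from the parsing above; the numeric values are hypothetical).
#
#   {"method": "setPLCTag", "params": {"measurement": "manualfrequencysetpoint", "value": 49}}
#   {"method": "changeSetpoint", "params": {"setpointType": 3, "setpointValue": 49}}
#
# The RPC request id is the trailing segment of the topic, e.g. a request on
# v1/devices/me/rpc/request/42 is acked on v1/devices/me/rpc/response/42.
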
def ack(msgid):
    #logger.debug(msgid)
    #logger.debug(mac)
    #logger.debug(name)
    #logger.debug(value)
    publish("v1/devices/me/rpc/response/" + str(msgid), json.dumps({"msg": {"time": time.time()}, "metadata": "", "msgType": ""}), 1)


def getMode():
    #returns the current pidcontrolmode value, or None if it cannot be read
    try:
        data = recall()
        for controller in data:
            for measure in controller["measures"]:
                if measure["name"] == "pidcontrolmode":
                    return measure["value"]
    except Exception:
        return None

def convert_int(plc_tag, value):
    well_status_codes = {
        0: "Running",
        1: "Pumped Off",
        2: "Alarmed",
        3: "Locked Out",
        4: "Stopped"
    }

    pid_control_codes = {
        0: "Flow",
        1: "Fluid Level",
        2: "Tubing Pressure",
        3: "Manual"
    }

    downhole_codes = {
        0: "OK",
        1: "Connecting",
        2: "Open Circuit",
        3: "Shorted",
        4: "Cannot Decode"
    }

    permissive_codes = {
        0: "OK",
        1: "Flow",
        2: "Intake Pressure",
        3: "Intake Temperature",
        4: "Tubing Pressure",
        5: "VFD",
        6: "Fluid Level",
        7: "Min. Downtime"
    }

    alarm_codes = {
        0: "OK",
        1: "Alarm"
    }

    alarm_vfd_codes = {
        0: "OK",
        1: "Locked Out"
    }

    vfd_fault_codes = {
        0: "No Fault",
        2: "Auxiliary Input",
        3: "Power Loss",
        4: "UnderVoltage",
        5: "OverVoltage",
        7: "Motor Overload",
        8: "Heatsink OverTemp",
        9: "Thermistor OverTemp",
        10: "Dynamic Brake OverTemp",
        12: "Hardware OverCurrent",
        13: "Ground Fault",
        14: "Ground Warning",
        15: "Load Loss",
        17: "Input Phase Loss",
        18: "Motor PTC Trip",
        19: "Task Overrun",
        20: "Torque Prove Speed Band",
        21: "Output Phase Loss",
        24: "Decel Inhibit",
        25: "OverSpeed Limit",
        26: "Brake Slipped",
        27: "Torque Prove Conflict",
        28: "TP Encls Conflict",
        29: "Analog In Loss",
        33: "Auto Restarts Exhausted",
        35: "IPM OverCurrent",
        36: "SW OverCurrent",
        38: "Phase U to Ground",
        39: "Phase V to Ground",
        40: "Phase W to Ground",
        41: "Phase UV Short",
        42: "Phase VW Short",
        43: "Phase WU Short",
        44: "Phase UNeg to Ground",
        45: "Phase VNeg to Ground",
        46: "Phase WNeg to Ground",
        48: "System Defaulted",
        49: "Drive Powerup",
        51: "Clear Fault Queue",
        55: "Control Board Overtemp",
        59: "Invalid Code",
        61: "Shear Pin 1",
        62: "Shear Pin 2",
        64: "Drive Overload",
        66: "OW Torque Level",
        67: "Pump Off",
        71: "Port 1 Adapter",
        72: "Port 2 Adapter",
        73: "Port 3 Adapter",
        74: "Port 4 Adapter",
        75: "Port 5 Adapter",
        76: "Port 6 Adapter",
        77: "IR Volts Range",
        78: "FluxAmps Ref Range",
        79: "Excessive Load",
        80: "AutoTune Aborted",
        81: "Port 1 DPI Loss",
        82: "Port 2 DPI Loss",
        83: "Port 3 DPI Loss",
        84: "Port 4 DPI Loss",
        85: "Port 5 DPI Loss",
        86: "Port 6 DPI Loss",
        87: "IXo Voltage Range",
        91: "Primary Velocity Feedback Loss",
        93: "Hardware Enable Check",
        94: "Alternate Velocity Feedback Loss",
        95: "Auxiliary Velocity Feedback Loss",
        96: "Position Feedback Loss",
        97: "Auto Tach Switch",
        100: "Parameter Checksum",
        101: "Power Down NVS Blank",
        102: "NVS Not Blank",
        103: "Power Down NVS Incompatible",
        104: "Power Board Checksum",
        106: "Incompat MCB-PB",
        107: "Replaced MCB-PB",
        108: "Analog Calibration Checksum",
        110: "Invalid Power Board Data",
        111: "Power Board Invalid ID",
        112: "Power Board App Min Version",
        113: "Tracking DataError",
        115: "Power Down Table Full",
        116: "Power Down Entry Too Large",
        117: "Power Down Data Checksum",
        118: "Power Board Power Down Checksum",
        124: "App ID Changed",
        125: "Using Backup App",
        134: "Start on Power Up",
        137: "External Precharge Error",
        138: "Precharge Open",
        141: "Autotune Enc Angle",
        142: "Autotune Speed Restricted",
        143: "Autotune Current Regulator",
        144: "Autotune Inertia",
        145: "Autotune Travel",
        13035: "Net IO Timeout",
        13037: "Net IO Timeout"
    }

    plc_tags = {
        "wellstatus": well_status_codes.get(value, "Invalid Code"),
        "pidcontrolmode": pid_control_codes.get(value, "Invalid Code"),
        "downholesensorstatus": downhole_codes.get(value, "Invalid Code"),
        "alarmflowrate": alarm_codes.get(value, "Invalid Code"),
        "alarmintakepressure": alarm_codes.get(value, "Invalid Code"),
        "alarmintaketemperature": alarm_codes.get(value, "Invalid Code"),
        "alarmtubingpressure": alarm_codes.get(value, "Invalid Code"),
        "alarmvfd": alarm_codes.get(value, "Invalid Code"),
        "alarmlockout": alarm_vfd_codes.get(value, "Invalid Code"),
        "alarmfluidlevel": alarm_codes.get(value, "Invalid Code"),
        "runpermissive": permissive_codes.get(value, "Invalid Code"),
        "startpermissive": permissive_codes.get(value, "Invalid Code"),
        "last_vfd_fault_code": vfd_fault_codes.get(value, "Invalid Code"),
        "vfd_fault": vfd_fault_codes.get(value, "Invalid Code"),
        "flowmeter_fault": alarm_codes.get(value, "Invalid Code")
    }

    return plc_tags.get(plc_tag, "Invalid Tag")
145	Pub_Sub/fkplcpond/thingsboard/v3/pub/sendData.py (new file)
@@ -0,0 +1,145 @@
# Enter your python code.
import json, os, time
from datetime import datetime as dt
from common.Logger import logger
from quickfaas.remotebus import publish
from quickfaas.global_dict import get as get_params
from quickfaas.global_dict import _set_global_args


def reboot():
    #basic = Basic()
    logger.info("!" * 10 + "REBOOTING DEVICE" + "!" * 10)
    r = os.popen("kill -s SIGHUP `cat /var/run/python/supervisord.pid`").read()
    logger.info(f"REBOOT : {r}")


def checkFileExist(filename):
    path = "/var/user/files"
    if not os.path.exists(path):
        logger.info("no folder making files folder in var/user")
        os.makedirs(path)
        with open(path + "/" + filename, "a") as f:
            json.dump({}, f)
    if not os.path.exists(path + "/" + filename):
        logger.info("no creds file making creds file")
        with open(path + "/" + filename, "a") as f:
            json.dump({}, f)


def convertDStoJSON(ds):
    j = dict()
    for x in ds:
        j[x["key"]] = x["value"]
    return j


def convertJSONtoDS(j):
    d = []
    for key in j.keys():
        d.append({"key": key, "value": j[key]})
    return d


def checkCredentialConfig():
    logger.info("CHECKING CONFIG")
    cfgpath = "/var/user/cfg/device_supervisor/device_supervisor.cfg"
    credspath = "/var/user/files/creds.json"
    cfg = dict()
    with open(cfgpath, "r") as f:
        cfg = json.load(f)
    clouds = cfg.get("clouds")
    logger.info(clouds)
    #if not configured then try to configure from stored values
    if clouds[0]["args"]["clientId"] == "unknown" or clouds[0]["args"]["username"] == "unknown" or not clouds[0]["args"]["passwd"] or clouds[0]["args"]["passwd"] == "unknown":
        checkFileExist("creds.json")
        with open(credspath, "r") as c:
            creds = json.load(c)
        if creds:
            logger.info("updating config with stored data")
            clouds[0]["args"]["clientId"] = creds["clientId"]
            clouds[0]["args"]["username"] = creds["userName"]
            clouds[0]["args"]["passwd"] = creds["password"]
            cfg["clouds"] = clouds
            cfg = checkParameterConfig(cfg)
            with open(cfgpath, "w", encoding='utf-8') as n:
                json.dump(cfg, n, indent=1, ensure_ascii=False)
            reboot()
    else:
        #clouds is filled out: if its data differs from what is stored, assume someone typed in
        #something new and store it; if creds is empty, fill it from clouds' data
        checkFileExist("creds.json")
        with open(credspath, "r") as c:
            logger.info("updating stored file with new data")
            cfg = checkParameterConfig(cfg)
            with open(cfgpath, "w", encoding='utf-8') as n:
                json.dump(cfg, n, indent=1, ensure_ascii=False)
            creds = json.load(c)
        if creds:
            if creds["clientId"] != clouds[0]["args"]["clientId"]:
                creds["clientId"] = clouds[0]["args"]["clientId"]
            if creds["userName"] != clouds[0]["args"]["username"]:
                creds["userName"] = clouds[0]["args"]["username"]
            if creds["password"] != clouds[0]["args"]["passwd"]:
                creds["password"] = clouds[0]["args"]["passwd"]
        else:
            creds["clientId"] = clouds[0]["args"]["clientId"]
            creds["userName"] = clouds[0]["args"]["username"]
            creds["password"] = clouds[0]["args"]["passwd"]
        with open(credspath, "w") as cw:
            json.dump(creds, cw)


def checkParameterConfig(cfg):
    logger.info("Checking Parameters!!!!")
    paramspath = "/var/user/files/params.json"
    cfgparams = convertDStoJSON(cfg.get("labels"))
    #check stored values
    checkFileExist("params.json")
    with open(paramspath, "r") as f:
        logger.info("Opened param storage file")
        params = json.load(f)
        if params:
            if cfgparams != params:
                #go through each param:
                #if not "unknown" and cfg and params aren't the same, take from cfg (likely updated manually)
                #if key is in cfg but not in params, copy it to params
                logger.info("equalizing params between cfg and stored")
                for key in cfgparams.keys():
                    try:
                        if cfgparams[key] != params[key] and cfgparams[key] != "unknown":
                            params[key] = cfgparams[key]
                    except KeyError:
                        #key present in cfg but not yet stored
                        params[key] = cfgparams[key]
                cfg["labels"] = convertJSONtoDS(params)
                _set_global_args(convertJSONtoDS(params))
                with open(paramspath, "w") as p:
                    json.dump(params, p)
        else:
            with open(paramspath, "w") as p:
                logger.info("initializing param file with params in memory")
                json.dump(convertDStoJSON(get_params()), p)
            cfg["labels"] = get_params()

    return cfg


# Helper function to split the payload into chunks
def chunk_payload(payload, chunk_size=20):
    chunked_values = list(payload["values"].items())
    for i in range(0, len(chunked_values), chunk_size):
        yield {
            "ts": payload["ts"],
            "values": dict(chunked_values[i:i+chunk_size])
        }


def sendData(message):
    payload = {}
    payload["ts"] = (round(dt.timestamp(dt.now())/600)*600)*1000
    payload["values"] = {}
    try:
        checkCredentialConfig()
    except Exception as e:
        logger.error(e)
    for measure in message["measures"]:
        try:
            logger.debug(measure)
            payload["values"][measure["name"]] = measure["value"]
        except Exception as e:
            logger.error(e)
    for chunk in chunk_payload(payload=payload):
        publish(__topic__, json.dumps(chunk), __qos__)
        time.sleep(2)
    publish("v1/devices/me/attributes", json.dumps({"latestReportTime": (round(dt.timestamp(dt.now())/600)*600)*1000}), __qos__)
73	Pub_Sub/fkplcpond/thingsboard/v3/sub/receiveCommand.py (new file)
@@ -0,0 +1,73 @@
import json, time
from datetime import datetime as dt
from quickfaas.measure import recall, write
from quickfaas.remotebus import publish
from common.Logger import logger


# Helper function to split the payload into chunks
def chunk_payload(payload, chunk_size=20):
    chunked_values = list(payload["values"].items())
    for i in range(0, len(chunked_values), chunk_size):
        yield {
            "ts": payload["ts"],
            "values": dict(chunked_values[i:i+chunk_size])
        }


def sync():
    #get new values and send
    payload = {"ts": round(dt.timestamp(dt.now()))*1000, "values": {}}
    topic = "v1/devices/me/telemetry"
    try:
        data = recall()  #json.loads(recall().decode("utf-8"))
    except Exception as e:
        logger.error(e)
    logger.debug(data)
    for controller in data:
        for measure in controller["measures"]:
            #publish measure
            payload["values"][measure["name"]] = measure["value"]
    logger.debug("Sending on topic: {}".format(topic))
    logger.debug("Sending value: {}".format(payload))
    for chunk in chunk_payload(payload=payload):
        publish(topic, json.dumps(chunk), 1)
        time.sleep(2)


def writeplctag(value, wizard_api):
    try:
        #value = json.loads(value.replace("'",'"'))
        logger.debug(value)
        message = {"plcpond": {value["measurement"]: value["value"]}}
        resp = wizard_api.write_plc_values(message)
        #logger.debug("RETURN FROM WRITE: {}".format(resp))
        return True
    except Exception as e:
        logger.debug(e)
        return False


def receiveCommand(topic, payload, wizard_api):
    try:
        logger.debug(topic)
        logger.debug(json.loads(payload))
        p = json.loads(payload)
        command = p["method"]
        commands = {
            "sync": sync,
            "writeplctag": writeplctag,
        }
        if command == "setPLCTag":
            result = commands["writeplctag"](p["params"], wizard_api)
            if result:
                sync()
        #commands[command](p["mac"].lower(),p["payload"]["value"], wizard_api)
        #logger.debug(command)
        ack(topic.split("/")[-1], wizard_api)
    except Exception as e:
        logger.debug(e)


def ack(msgid, wizard_api):
    #logger.debug(msgid)
    #logger.debug(mac)
    #logger.debug(name)
    #logger.debug(value)
    wizard_api.mqtt_publish("v1/devices/me/rpc/response/" + str(msgid), json.dumps({"msg": {"time": time.time()}, "metadata": "", "msgType": ""}))
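
# Not part of the commit: in this gateway variant the RPC round trip has the
# same shape as in the advvfdipp handler, but writes go through wizard_api.
# Example request and the write it produces ("pumpspeed" is hypothetical):
#   request: {"method": "setPLCTag", "params": {"measurement": "pumpspeed", "value": 42}}
#   write:   wizard_api.write_plc_values({"plcpond": {"pumpspeed": 42}})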
@@ -1,4 +1,4 @@
-import json, os
+import json, os, time
 from datetime import datetime as dt
 from common.Logger import logger
 from quickfaas.remotebus import publish
@@ -177,7 +177,14 @@ def get_totalizers():
     }
     return totalizers
 
+
+# Helper function to split the payload into chunks
+def chunk_payload(payload, chunk_size=20):
+    chunked_values = list(payload["values"].items())
+    for i in range(0, len(chunked_values), chunk_size):
+        yield {
+            "ts": payload["ts"],
+            "values": dict(chunked_values[i:i+chunk_size])
+        }
+
 def sendData(message,wizard_api):
@@ -200,7 +207,9 @@ def sendData(message,wizard_api):
         payload["values"]["latitude"], payload["values"]["longitude"], payload["values"]["speed"] = getGPS()
     except:
         logger.error("Could not get GPS coordinates")
-    publish(__topic__, json.dumps(payload), __qos__)
+    for chunk in chunk_payload(payload=payload):
+        publish(__topic__, json.dumps(chunk), __qos__)
+        time.sleep(2)
 
     if dayReset:
         resetPayload["values"]["yesterday_volume"] = payload["values"]["day_volume"]
@@ -117,6 +117,15 @@ def checkParameterConfig(cfg):
 
     return cfg
 
+# Helper function to split the payload into chunks
+def chunk_payload(payload, chunk_size=20):
+    chunked_values = list(payload["values"].items())
+    for i in range(0, len(chunked_values), chunk_size):
+        yield {
+            "ts": payload["ts"],
+            "values": dict(chunked_values[i:i+chunk_size])
+        }
+
 def sendData(message):
     #logger.debug(message)
     try:
@@ -141,15 +150,9 @@ def sendData(message):
     payload["values"]["oil_run_tank_level"] = run_tanks["oil_0" + str(run_tanks["oil_run_tank"]) + "_level"]
     payload["values"]["water_run_tank_level"] = run_tanks["water_0" + str(run_tanks["water_run_tank"]) + "_level"]
 
-    values_1 = dict(list(payload["values"].items())[len(payload["values"])//2:])
-    payload_1 = {"ts": (round(dt.timestamp(dt.now())/600)*600)*1000, "values": {}}
-    payload_1["values"] = values_1
-    publish(__topic__, json.dumps(payload_1), __qos__)
-    time.sleep(2)
-    values_2 = dict(list(payload["values"].items())[:len(payload["values"])//2])
-    payload_2 = {"ts": (round(dt.timestamp(dt.now())/600)*600)*1000, "values": {}}
-    payload_2["values"] = values_2
-    publish(__topic__, json.dumps(payload_2), __qos__)
-    #publish(__topic__, json.dumps(payload), __qos__)
+    for chunk in chunk_payload(payload=payload):
+        publish(__topic__, json.dumps(chunk), __qos__)
+        time.sleep(2)
     publish("v1/devices/me/attributes", json.dumps({"latestReportTime": (round(dt.timestamp(dt.now())/600)*600)*1000}), __qos__)
33	code snippets/chunked_sync_function.py (new file)
@@ -0,0 +1,33 @@
import json, time
from datetime import datetime as dt
from quickfaas.measure import recall, write
from quickfaas.remotebus import publish
from common.Logger import logger


# Helper function to split the payload into chunks
def chunk_payload(payload, chunk_size=20):
    chunked_values = list(payload["values"].items())
    for i in range(0, len(chunked_values), chunk_size):
        yield {
            "ts": payload["ts"],
            "values": dict(chunked_values[i:i+chunk_size])
        }


def sync():
    #get new values and send
    payload = {"ts": round(dt.timestamp(dt.now()))*1000, "values": {}}
    topic = "v1/devices/me/telemetry"
    try:
        data = recall()  #json.loads(recall().decode("utf-8"))
    except Exception as e:
        logger.error(e)
    logger.debug(data)
    for controller in data:
        for measure in controller["measures"]:
            #publish measure
            payload["values"][measure["name"]] = measure["value"]
    logger.debug("Sending on topic: {}".format(topic))
    logger.debug("Sending value: {}".format(payload))
    for chunk in chunk_payload(payload=payload):
        publish(topic, json.dumps(chunk), 1)
        time.sleep(2)
179	code snippets/chunked_sync_function_gateway.ipynb (new file)
@@ -0,0 +1,179 @@
{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [],
   "source": [
    "import json, time"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {},
   "outputs": [],
   "source": [
    "payload = {\n",
    "    \"Controller A\": [\n",
    "        {\n",
    "            \"ts\": 1700149800000,\n",
    "            \"values\": {\n",
    "                \"Temperature\": 22,\n",
    "                \"Pressure\": 101.3,\n",
    "                \"one\": 12,\n",
    "                \"two\": 2,\n",
    "                \"three\": 3,\n",
    "                \"4\": 4,\n",
    "                \"5\": 5,\n",
    "                \"6\": 6,\n",
    "                \"7\": 7,\n",
    "                \"Temperature1\": 22,\n",
    "                \"Pressure1\": 101.3,\n",
    "                \"one1\": 12,\n",
    "                \"two1\": 2,\n",
    "                \"three1\": 3,\n",
    "                \"41\": 4,\n",
    "                \"51\": 5,\n",
    "                \"61\": 6,\n",
    "                \"71\": 7,\n",
    "                \"Temperature2\": 22,\n",
    "                \"Pressure2\": 101.3,\n",
    "                \"one2\": 12,\n",
    "                \"two2\": 2,\n",
    "                \"three2\": 3,\n",
    "                \"42\": 4,\n",
    "                \"52\": 5,\n",
    "                \"62\": 6,\n",
    "                \"72\": 7,\n",
    "                \"Temperature3\": 22,\n",
    "                \"Pressure3\": 101.3,\n",
    "                \"one3\": 12,\n",
    "                \"two3\": 2,\n",
    "                \"three3\": 3,\n",
    "                \"43\": 4,\n",
    "                \"53\": 5,\n",
    "                \"63\": 6,\n",
    "                \"73\": 7\n",
    "            }\n",
    "        }\n",
    "    ],\n",
    "    \"Controller B\": [\n",
    "        {\n",
    "            \"ts\": 1700149800000,\n",
    "            \"values\": {\n",
    "                \"Humidity\": 45\n",
    "            }\n",
    "        }\n",
    "    ]\n",
    "}"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [],
   "source": [
    "attributes_payload = {\n",
    "    \"Controller A\": {\n",
    "        \"latestReportTime\": 1700149800000\n",
    "    },\n",
    "    \"Controller B\": {\n",
    "        \"latestReportTime\": 1700149800000\n",
    "    }\n",
    "}"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "metadata": {},
   "outputs": [],
   "source": [
    "def chunk_payload(payload, chunk_size=20, is_attributes_payload=False):\n",
    "    if is_attributes_payload:\n",
    "        # For attributes payload, chunk the controllers\n",
    "        controllers = list(payload.items())\n",
    "        for i in range(0, len(controllers), chunk_size):\n",
    "            yield dict(controllers[i:i + chunk_size])\n",
    "    else:\n",
    "        # For data payload, chunk the values within each controller\n",
    "        for controller, data in payload.items():\n",
    "            for entry in data:\n",
    "                ts = entry['ts']\n",
    "                values = entry['values']\n",
    "                chunked_values = list(values.items())\n",
    "                for i in range(0, len(chunked_values), chunk_size):\n",
    "                    yield {\n",
    "                        \"controller\": controller,\n",
    "                        \"ts\": ts,\n",
    "                        \"values\": dict(chunked_values[i:i + chunk_size])\n",
    "                    }\n",
    "\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "__topic__ {\"controller\": \"Controller A\", \"ts\": 1700149800000, \"values\": {\"Temperature\": 22, \"Pressure\": 101.3, \"one\": 12, \"two\": 2, \"three\": 3, \"4\": 4, \"5\": 5, \"6\": 6, \"7\": 7, \"Temperature1\": 22, \"Pressure1\": 101.3, \"one1\": 12, \"two1\": 2, \"three1\": 3, \"41\": 4, \"51\": 5, \"61\": 6, \"71\": 7, \"Temperature2\": 22, \"Pressure2\": 101.3}} __qos__\n",
      "__topic__ {\"controller\": \"Controller A\", \"ts\": 1700149800000, \"values\": {\"one2\": 12, \"two2\": 2, \"three2\": 3, \"42\": 4, \"52\": 5, \"62\": 6, \"72\": 7, \"Temperature3\": 22, \"Pressure3\": 101.3, \"one3\": 12, \"two3\": 2, \"three3\": 3, \"43\": 4, \"53\": 5, \"63\": 6, \"73\": 7}} __qos__\n",
      "__topic__ {\"controller\": \"Controller B\", \"ts\": 1700149800000, \"values\": {\"Humidity\": 45}} __qos__\n"
     ]
    }
   ],
   "source": [
    "for chunk in chunk_payload(payload=payload):\n",
    "    print(\"__topic__\", json.dumps(chunk), \"__qos__\")\n",
    "    time.sleep(2)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "__topic__ {\"Controller A\": {\"latestReportTime\": 1700149800000}, \"Controller B\": {\"latestReportTime\": 1700149800000}} __qos__\n"
     ]
    }
   ],
   "source": [
    "for chunk in chunk_payload(payload=attributes_payload, is_attributes_payload=True):\n",
    "    print(\"__topic__\", json.dumps(chunk), \"__qos__\")\n",
    "    time.sleep(2)"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "tbDataCollector",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.10.5"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
105	code snippets/phoneValidation.ipynb (new file)
@@ -0,0 +1,105 @@
{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 11,
   "metadata": {},
   "outputs": [],
   "source": [
    "import phonenumbers\n",
    "from phonenumbers import carrier"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 40,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "['+14322228152',\n",
       " '+13254509104',\n",
       " '+14323012230',\n",
       " '+14326314847',\n",
       " '+14326311606',\n",
       " '+14326311503',\n",
       " '+14326409671']"
      ]
     },
     "execution_count": 40,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "numbers = \"+14322228152,+13254509104,+14323012230,+14326314847,+14326311606,+14326311503,+14326409671\"\n",
    "numbers = numbers.split(\",\")\n",
    "numbers"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 41,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Country Code: 1 National Number: 4322228152\n",
      "Is Possible: True\n",
      "Is Valid: True\n",
      "Country Code: 1 National Number: 3254509104\n",
      "Is Possible: True\n",
      "Is Valid: True\n",
      "Country Code: 1 National Number: 4323012230\n",
      "Is Possible: True\n",
      "Is Valid: True\n",
      "Country Code: 1 National Number: 4326314847\n",
      "Is Possible: True\n",
      "Is Valid: True\n",
      "Country Code: 1 National Number: 4326311606\n",
      "Is Possible: True\n",
      "Is Valid: True\n",
      "Country Code: 1 National Number: 4326311503\n",
      "Is Possible: True\n",
      "Is Valid: True\n",
      "Country Code: 1 National Number: 4326409671\n",
      "Is Possible: True\n",
      "Is Valid: True\n"
     ]
    }
   ],
   "source": [
    "for number in numbers:\n",
    "    n = phonenumbers.parse(number, None)\n",
    "    print(n)\n",
    "    print(f\"Is Possible: {phonenumbers.is_possible_number(n)}\")\n",
    "    print(f\"Is Valid: {phonenumbers.is_valid_number(n)}\")\n",
    "    #print(f\"Carrier: {carrier.name_for_number(n,\"en\")}\")\n"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "phoneValidator",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.12.0"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
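
# Not part of the commit: a minimal sketch of the carrier lookup that the
# notebook's last cell leaves commented out. `carrier.name_for_number` is part
# of the `phonenumbers` package; note the commented-out form only parses on
# Python 3.12+ because of the nested double quotes inside the f-string, so
# pre-extracting the name (or using single quotes) is more portable.
#
#   import phonenumbers
#   from phonenumbers import carrier
#
#   n = phonenumbers.parse("+14322228152", None)
#   name = carrier.name_for_number(n, "en")  # "" when no carrier mapping is known
#   print(f"Carrier: {name}")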
280	code snippets/plcfreshwater tests 1.ipynb (new file; diff suppressed because one or more lines are too long)
196	code snippets/plcfreshwater tests 2.ipynb (new file; diff suppressed because one or more lines are too long)