Files
HP_InHand_IG502/Pub_Sub/sp_transfer/thingsboard/sp_transfer.cfg
2024-07-31 13:56:21 -05:00

412 lines
21 KiB
JSON

{
"controllers": [
{
"enable": 1,
"protocol": "AllenBradley MicroCip",
"name": "sp_transfer",
"samplePeriod": 10,
"desc": "",
"expired": 1000,
"args": {
"slot": 0,
"connectTimeOut": 10000
},
"enableDebug": 0,
"enablePerOnchange": 0,
"endpoint": "192.168.1.12:44818"
}
],
"measures": [
{
"name": "water_tank_01_level",
"ctrlName": "sp_transfer",
"group": "default",
"uploadType": "periodic",
"dataType": "FLOAT",
"addr": "Water_Tank_1_Level",
"decimal": 2,
"readWrite": "ro",
"unit": "",
"desc": "",
"storageLwTSDB": 0,
"transformType": 0
},
{
"name": "auto_input",
"ctrlName": "sp_transfer",
"group": "default",
"uploadType": "periodic",
"dataType": "BIT",
"addr": "Raw_Auto_Input",
"bitMap": 0,
"reverseBit": 0,
"readWrite": "ro",
"unit": "",
"desc": "",
"storageLwTSDB": 0,
"transformType": 0
},
{
"name": "cloud_control_cmd",
"ctrlName": "sp_transfer",
"group": "default",
"uploadType": "periodic",
"dataType": "BIT",
"addr": "CMD_Cloud_Control",
"bitMap": 0,
"reverseBit": 0,
"readWrite": "rw",
"unit": "",
"desc": "",
"storageLwTSDB": 0,
"transformType": 0
},
{
"name": "start_permissive_spt",
"ctrlName": "sp_transfer",
"group": "default",
"uploadType": "periodic",
"dataType": "FLOAT",
"addr": "WTP_Start_Setpoint",
"decimal": 2,
"readWrite": "rw",
"unit": "",
"desc": "",
"storageLwTSDB": 0,
"transformType": 0
},
{
"name": "stop_permissive_spt",
"ctrlName": "sp_transfer",
"group": "default",
"uploadType": "periodic",
"dataType": "FLOAT",
"addr": "WTP_Stop_Setpoint",
"decimal": 2,
"readWrite": "rw",
"unit": "",
"desc": "",
"storageLwTSDB": 0,
"transformType": 0
},
{
"name": "water_tank_01_hi_alm",
"ctrlName": "sp_transfer",
"group": "default",
"uploadType": "periodic",
"dataType": "BIT",
"addr": "WT1_Hi_AL0",
"bitMap": 0,
"reverseBit": 0,
"readWrite": "ro",
"unit": "",
"desc": "",
"storageLwTSDB": 0,
"transformType": 0
},
{
"name": "water_tank_01_hihi_alm",
"ctrlName": "sp_transfer",
"group": "default",
"uploadType": "periodic",
"dataType": "BIT",
"addr": "WT1_HiHi_AL0",
"bitMap": 0,
"reverseBit": 0,
"readWrite": "ro",
"unit": "",
"desc": "",
"storageLwTSDB": 0,
"transformType": 0
},
{
"name": "run_status",
"ctrlName": "sp_transfer",
"group": "default",
"uploadType": "periodic",
"dataType": "BIT",
"addr": "Raw_Run_Status",
"readWrite": "ro",
"unit": "",
"desc": "",
"transformType": 0,
"storageLwTSDB": 0,
"reverseBit": 0,
"bitMap": 0
},
{
"name": "water_tank_01_hi_spt",
"ctrlName": "sp_transfer",
"group": "default",
"uploadType": "periodic",
"dataType": "FLOAT",
"addr": "WT1_Hi_SPT",
"readWrite": "ro",
"unit": "",
"desc": "",
"transformType": 0,
"storageLwTSDB": 0,
"decimal": 2
},
{
"name": "water_tank_01_hihi_spt",
"ctrlName": "sp_transfer",
"group": "default",
"uploadType": "periodic",
"dataType": "FLOAT",
"addr": "WT1_HiHi_SPT",
"readWrite": "ro",
"unit": "",
"desc": "",
"transformType": 0,
"storageLwTSDB": 0,
"decimal": 2
}
],
"alarmLables": [
"default"
],
"alarms": [],
"groups": [
{
"name": "default",
"uploadInterval": 600,
"LwTSDBSize": 150000,
"strategy": 1,
"historyDataPath": "/var/user/data/dbhome/device_supervisor/LwTSDB",
"enablePerOnchange": 0
}
],
"misc": {
"maxAlarmRecordSz": 2000,
"logLvl": "INFO",
"coms": [
{
"name": "rs232",
"baud": 9600,
"bits": 8,
"stopbits": 1,
"parityChk": "n"
},
{
"name": "rs485",
"baud": 9600,
"bits": 8,
"stopbits": 1,
"parityChk": "n"
}
],
"cachePath": "/var/user/data/dbhome/device_supervisor/offlinedata",
"cacheSize": 10000,
"debugLogPath": "/var/user/data/dbhome/device_supervisor/debugLog",
"debugLogSize": 2000
},
"clouds": [
{
"cacheSize": 10000,
"enable": 1,
"name": "default",
"type": "Standard MQTT",
"args": {
"host": "hp.henrypump.cloud",
"port": 1883,
"clientId": "unknown",
"auth": 1,
"tls": 0,
"cleanSession": 0,
"mqttVersion": "v3.1.1",
"keepalive": 60,
"key": "",
"cert": "",
"rootCA": "",
"verifyServer": 0,
"verifyClient": 0,
"username": "unknown",
"passwd": "unknown",
"willQos": 0,
"willRetain": 0,
"willTopic": "",
"willPayload": "",
"tlsAuth": "caSelfSigned"
},
"uploadRules": []
}
],
"quickfaas": {
"genericFuncs": [],
"uploadFuncs": [
{
"qos": 1,
"funcName": "sendData",
"script": "# Enter your python code.\nimport json, os, time\nfrom datetime import datetime as dt\nfrom common.Logger import logger\nfrom quickfaas.remotebus import publish\nfrom quickfaas.global_dict import get as get_params\nfrom quickfaas.global_dict import _set_global_args\nfrom mobiuspi_lib.gps import GPS\n\ndef reboot():\n #basic = Basic()\n logger.info(\"!\" * 10 + \"REBOOTING DEVICE\" + \"!\"*10)\n r = os.popen(\"kill -s SIGHUP `cat /var/run/python/supervisord.pid`\").read()\n logger.info(f\"REBOOT : {r}\")\n\ndef checkFileExist(filename):\n path = \"/var/user/files\"\n if not os.path.exists(path):\n logger.info(\"no folder making files folder in var/user\")\n os.makedirs(path)\n with open(path + \"/\" + filename, \"a\") as f:\n json.dump({}, f)\n if not os.path.exists(path + \"/\" + filename):\n logger.info(\"no creds file making creds file\")\n with open(path + \"/\" + filename, \"a\") as f:\n json.dump({}, f)\n\ndef convertDStoJSON(ds):\n j = dict()\n for x in ds:\n j[x[\"key\"]] = x[\"value\"]\n return j\n\ndef convertJSONtoDS(j):\n d = []\n for key in j.keys():\n d.append({\"key\": key, \"value\": j[key]})\n return d\n\ndef checkCredentialConfig():\n logger.info(\"CHECKING CONFIG\")\n cfgpath = \"/var/user/cfg/device_supervisor/device_supervisor.cfg\"\n credspath = \"/var/user/files/creds.json\"\n cfg = dict()\n with open(cfgpath, \"r\") as f:\n cfg = json.load(f)\n clouds = cfg.get(\"clouds\")\n logger.info(clouds)\n #if not configured then try to configure from stored values\n if clouds[0][\"args\"][\"clientId\"] == \"unknown\" or clouds[0][\"args\"][\"username\"] == \"unknown\" or not clouds[0][\"args\"][\"passwd\"] or clouds[0][\"args\"][\"passwd\"] == \"unknown\":\n checkFileExist(\"creds.json\")\n with open(credspath, \"r\") as c:\n creds = json.load(c)\n if creds:\n logger.info(\"updating config with stored data\")\n clouds[0][\"args\"][\"clientId\"] = creds[\"clientId\"]\n clouds[0][\"args\"][\"username\"] = creds[\"userName\"]\n 
clouds[0][\"args\"][\"passwd\"] = creds[\"password\"]\n cfg[\"clouds\"] = clouds\n cfg = checkParameterConfig(cfg)\n with open(cfgpath, \"w\", encoding='utf-8') as n:\n json.dump(cfg, n, indent=1, ensure_ascii=False)\n reboot()\n else:\n #assuming clouds is filled out, if data is different then assume someone typed in something new and store it, if creds is empty fill with clouds' data\n checkFileExist(\"creds.json\")\n with open(credspath, \"r\") as c:\n logger.info(\"updating stored file with new data\")\n cfg = checkParameterConfig(cfg)\n with open(cfgpath, \"w\", encoding='utf-8') as n:\n json.dump(cfg, n, indent=1, ensure_ascii=False)\n creds = json.load(c)\n if creds:\n if creds[\"clientId\"] != clouds[0][\"args\"][\"clientId\"]:\n creds[\"clientId\"] = clouds[0][\"args\"][\"clientId\"]\n if creds[\"userName\"] != clouds[0][\"args\"][\"username\"]:\n creds[\"userName\"] = clouds[0][\"args\"][\"username\"]\n if creds[\"password\"] != clouds[0][\"args\"][\"passwd\"]:\n creds[\"password\"] = clouds[0][\"args\"][\"passwd\"]\n else:\n creds[\"clientId\"] = clouds[0][\"args\"][\"clientId\"]\n creds[\"userName\"] = clouds[0][\"args\"][\"username\"]\n creds[\"password\"] = clouds[0][\"args\"][\"passwd\"]\n with open(credspath, \"w\") as cw:\n json.dump(creds,cw)\n\ndef checkParameterConfig(cfg):\n logger.info(\"Checking Parameters!!!!\")\n paramspath = \"/var/user/files/params.json\"\n cfgparams = convertDStoJSON(cfg.get(\"labels\"))\n #check stored values \n checkFileExist(\"params.json\")\n with open(paramspath, \"r\") as f:\n logger.info(\"Opened param storage file\")\n params = json.load(f)\n if params:\n if cfgparams != params:\n #go through each param\n #if not \"unknown\" and cfg and params aren't the same take from cfg likely updated manually\n #if key in cfg but not in params copy to params\n logger.info(\"equalizing params between cfg and stored\")\n for key in cfgparams.keys():\n try:\n if cfgparams[key] != params[key] and cfgparams[key] != \"unknown\":\n 
params[key] = cfgparams[key]\n except:\n params[key] = cfgparams[key]\n cfg[\"labels\"] = convertJSONtoDS(params)\n _set_global_args(convertJSONtoDS(params))\n with open(paramspath, \"w\") as p:\n json.dump(params, p)\n else:\n with open(paramspath, \"w\") as p:\n logger.info(\"initializing param file with params in memory\")\n json.dump(convertDStoJSON(get_params()), p)\n cfg[\"labels\"] = get_params()\n \n return cfg\n\ndef getGPS():\n # Create a gps instance\n gps = GPS()\n\n # Retrieve GPS information\n position_status = gps.get_position_status()\n logger.debug(\"position_status: \")\n logger.debug(position_status)\n latitude = position_status[\"latitude\"].split(\" \")\n longitude = position_status[\"longitude\"].split(\" \")\n lat_dec = int(latitude[0][:-1]) + (float(latitude[1][:-1])/60)\n lon_dec = int(longitude[0][:-1]) + (float(longitude[1][:-1])/60)\n if latitude[2] == \"S\":\n lat_dec = lat_dec * -1\n if longitude[2] == \"W\":\n lon_dec = lon_dec * -1\n #lat_dec = round(lat_dec, 7)\n #lon_dec = round(lon_dec, 7)\n logger.info(\"HERE IS THE GPS COORDS\")\n logger.info(f\"LATITUDE: {lat_dec}, LONGITUDE: {lon_dec}\")\n speedKnots = position_status[\"speed\"].split(\" \")\n speedMPH = float(speedKnots[0]) * 1.151\n return (f\"{lat_dec:.8f}\",f\"{lon_dec:.8f}\",f\"{speedMPH:.2f}\")\n\ndef chunk_payload(payload, chunk_size=20):\n if \"values\" in payload:\n # Original format: {\"ts\": ..., \"values\": {...}}\n chunked_values = list(payload[\"values\"].items())\n for i in range(0, len(chunked_values), chunk_size):\n yield {\n \"ts\": payload[\"ts\"],\n \"values\": dict(chunked_values[i:i+chunk_size])\n }\n else:\n # New format: {\"key1\": \"value1\", \"key2\": \"value2\"}\n chunked_keys = list(payload.keys())\n for i in range(0, len(chunked_keys), chunk_size):\n yield {k: payload[k] for k in chunked_keys[i:i+chunk_size]}\n\ndef sendData(message):\n #logger.debug(message)\n try:\n checkCredentialConfig()\n except Exception as e:\n logger.error(e)\n payload = 
{\"ts\": (round(dt.timestamp(dt.now())/600)*600)*1000, \"values\": {}}\n attributes_payload = {}\n for measure in message[\"measures\"]:\n try:\n logger.debug(measure)\n if measure[\"health\"] == 1:\n if \"_spt\" in measure[\"name\"]:\n attributes_payload[measure[\"name\"]] = measure[\"value\"]\n payload[\"values\"][measure[\"name\"]] = measure[\"value\"] \n except Exception as e:\n logger.error(e)\n\n try:\n payload[\"values\"][\"latitude\"], payload[\"values\"][\"longitude\"], payload[\"values\"][\"speed\"] = getGPS()\n except:\n logger.error(\"Could not get GPS coordinates\")\n\n for chunk in chunk_payload(payload=payload):\n publish(__topic__, json.dumps(chunk), __qos__)\n time.sleep(2)\n \n attributes_payload[\"latestReportTime\"] = (round(dt.timestamp(dt.now())/600)*600)*1000\n for chunk in chunk_payload(payload=attributes_payload):\n publish(\"v1/devices/me/attributes\", json.dumps(chunk), __qos__)\n time.sleep(2)\n \n",
"name": "sendData",
"trigger": "measure_event",
"topic": "v1/devices/me/telemetry",
"msgType": 0,
"cloudName": "default",
"groups": [
"default"
]
}
],
"downloadFuncs": [
{
"name": "receiveCommand",
"topic": "v1/devices/me/rpc/request/+",
"qos": 1,
"funcName": "receiveCommand",
"payload_type": "JSON",
"script": "import json, time\nfrom datetime import datetime as dt\nfrom quickfaas.measure import recall, write\nfrom quickfaas.remotebus import publish\nfrom common.Logger import logger\n\n# Helper function to split the payload into chunks\ndef chunk_payload(payload, chunk_size=20):\n chunked_values = list(payload[\"values\"].items())\n for i in range(0, len(chunked_values), chunk_size):\n yield {\n \"ts\": payload[\"ts\"],\n \"values\": dict(chunked_values[i:i+chunk_size])\n }\n\ndef sync():\n #get new values and send\n payload = {\"ts\": round(dt.timestamp(dt.now()))*1000, \"values\": {}}\n topic = \"v1/devices/me/telemetry\"\n try:\n data = recall()#json.loads(recall().decode(\"utf-8\"))\n except Exception as e:\n logger.error(e)\n logger.debug(data)\n for controller in data:\n for measure in controller[\"measures\"]:\n #publish measure\n payload[\"values\"][measure[\"name\"]] = measure[\"value\"]\n logger.debug(\"Sending on topic: {}\".format(topic))\n logger.debug(\"Sending value: {}\".format(payload))\n for chunk in chunk_payload(payload=payload):\n publish(topic, json.dumps(chunk), 1)\n time.sleep(2)\ndef writeplctag(value):\n #value in the form {\"measurement\": <measurement_name>, \"value\": <value to write>}\n try:\n #value = json.loads(value.replace(\"'\",'\"'))\n logger.debug(value)\n #payload format: [{\"name\": \"advvfdipp\", \"measures\": [{\"name\": \"manualfrequencysetpoint\", \"value\": 49}]}]\n message = [{\"name\": \"sp_transfer\", \"measures\":[{\"name\":value[\"measurement\"], \"value\": value[\"value\"]}]}]\n resp = write(message) \n logger.debug(\"RETURN FROM WRITE: {}\".format(resp))\n return True\n except Exception as e:\n logger.debug(e)\n return False\n \ndef receiveCommand(topic, payload):\n try:\n logger.debug(topic)\n logger.debug(json.loads(payload))\n p = json.loads(payload)\n command = p[\"method\"]\n commands = {\n \"sync\": sync,\n \"writeplctag\": writeplctag,\n } \n if command == \"setPLCTag\":\n try:\n result = 
commands[\"writeplctag\"](p[\"params\"])\n logger.debug(result)\n except Exception as e:\n logger.error(e)\n #logger.debug(command)\n ack(topic.split(\"/\")[-1])\n time.sleep(5)\n sync()\n except Exception as e:\n logger.debug(e)\n \n\ndef ack(msgid):\n #logger.debug(msgid)\n #logger.debug(mac)\n #logger.debug(name)\n #logger.debug(value)\n publish(\"v1/devices/me/rpc/response/\" + str(msgid), json.dumps({\"msg\": {\"time\": time.time()}, \"metadata\": \"\", \"msgType\": \"\"}), 1)\n",
"msgType": 0,
"cloudName": "default",
"trigger": "command_event"
},
{
"name": "receiveAttributes",
"topic": "v1/devices/me/attributes",
"qos": 1,
"funcName": "receiveAttributes",
"payload_type": "JSON",
"script": "import json, time\nfrom datetime import datetime as dt\nfrom quickfaas.measure import recall, write\nfrom quickfaas.remotebus import publish\nfrom common.Logger import logger\n\n# Helper function to split the payload into chunks\ndef chunk_payload(payload, chunk_size=20):\n chunked_values = list(payload[\"values\"].items())\n for i in range(0, len(chunked_values), chunk_size):\n yield {\n \"ts\": payload[\"ts\"],\n \"values\": dict(chunked_values[i:i+chunk_size])\n }\n\ndef sync():\n #get new values and send\n payload = {\"ts\": round(dt.timestamp(dt.now()))*1000, \"values\": {}}\n topic = \"v1/devices/me/telemetry\"\n try:\n data = recall()#json.loads(recall().decode(\"utf-8\"))\n except Exception as e:\n logger.error(e)\n logger.debug(data)\n for controller in data:\n for measure in controller[\"measures\"]:\n #publish measure\n payload[\"values\"][measure[\"name\"]] = measure[\"value\"]\n logger.debug(\"Sending on topic: {}\".format(topic))\n logger.debug(\"Sending value: {}\".format(payload))\n for chunk in chunk_payload(payload=payload):\n publish(topic, json.dumps(chunk), 1)\n time.sleep(2)\ndef writeplctag(value):\n #value in the form {\"measurement\": <measurement_name>, \"value\": <value to write>}\n try:\n #value = json.loads(value.replace(\"'\",'\"'))\n logger.debug(value)\n #payload format: [{\"name\": \"advvfdipp\", \"measures\": [{\"name\": \"manualfrequencysetpoint\", \"value\": 49}]}]\n message = [{\"name\": \"sp_transfer\", \"measures\":[{\"name\":value[\"measurement\"], \"value\": value[\"value\"]}]}]\n resp = write(message) \n logger.debug(\"RETURN FROM WRITE: {}\".format(resp))\n return True\n except Exception as e:\n logger.debug(e)\n return False\n \ndef receiveAttributes(topic, payload):\n try:\n logger.debug(topic)\n logger.debug(json.loads(payload))\n p = json.loads(payload)\n for key, value in p.items():\n try:\n result = writeplctag({\"measurement\":key, \"value\":value})\n logger.debug(result)\n except Exception as e:\n 
logger.error(e)\n #logger.debug(command)\n sync()\n except Exception as e:\n logger.debug(e)\n \n\n",
"msgType": 0,
"cloudName": "default",
"trigger": "command_event"
}
]
},
"labels": [
{
"key": "SN",
"value": "GF5022311031657"
},
{
"key": "MAC",
"value": "00:18:05:28:4a:86"
}
],
"modbusSlave": {
"enable": 0,
"protocol": "Modbus-TCP",
"port": 502,
"slaveAddr": 1,
"int16Ord": "ab",
"int32Ord": "abcd",
"float32Ord": "abcd",
"maxConnection": 5,
"mapping_table": []
},
"modbusRTUSlave": {
"enable": 0,
"protocol": "Modbus-RTU",
"coms": "rs485",
"slaveAddr": 1,
"int16Ord": "ab",
"int32Ord": "abcd",
"float32Ord": "abcd",
"mapping_table": []
},
"iec104Server": {
"enable": 0,
"cotSize": 2,
"port": 2404,
"serverList": [
{
"asduAddr": 1
}
],
"kValue": 12,
"wValue": 8,
"t0": 30,
"t1": 15,
"t2": 10,
"t3": 20,
"maximumLink": 5,
"timeSet": 1,
"byteOrder": "abcd",
"mapping_table": []
},
"iec101Server": {
"enable": 0,
"coms": "rs485",
"mode": "UnBalance",
"linkLen": 2,
"linkAddr": 1,
"asduLen": 2,
"ioaLen": 3,
"cotLen": 2,
"serverList": [
{
"asduAddr": 1
}
],
"linkTimeOut": 2000,
"timeSet": 1,
"idleTimeOut": 10000,
"byteOrder": "abcd",
"mapping_table": {
"YX": [],
"YC": [],
"YK": []
}
},
"iec104Client": {
"enable": 0,
"connectType": 2,
"serverAddr": "ipower.inhandcloud.cn",
"serverPort": 2406,
"communicationCode": "",
"protocol": 1,
"asduAddr": 1,
"tls": 1,
"verification": 1,
"mapping_table": {
"YX": [],
"YC": [],
"YK": []
}
},
"opcuaServer": {
"enable": 0,
"port": 4840,
"maximumLink": 5,
"securityMode": 0,
"identifierType": "String",
"pubsub": 0,
"certificate": "None",
"privateKey": "None",
"mapping_table": []
},
"sl651Slave": {
"enable": 0,
"centerAaddr": 1,
"remoteAddr": "",
"addrCode": "",
"password": "",
"platform_list": [],
"mapping_table": []
},
"hj212Client": {
"enable": 0,
"platform_list": [],
"block_list": [],
"mapping_table": []
},
"southMetadata": {},
"bindMetadata": {},
"bindConfig": {
"enable": 0,
"bind": {
"modelId": "",
"modelName": "",
"srcId": "",
"srcName": "",
"devId": "",
"devName": ""
},
"varGroups": [],
"variables": [],
"alerts": []
},
"templates": {},
"version": "2.7.1"
}