added migration
RPi Mistaway Originals/device_base.py (new file, 229 lines)
@@ -0,0 +1,229 @@
import types
import traceback
import binascii
import threading
import time
import thread
import os
import struct
import sys
import textwrap
import re


class deviceBase():

    def __init__(self, name=None, number=None, mac=None, Q=None, mcu=None, companyId=None, offset=None, mqtt=None, Nodes=None):
        self.offset = offset
        self.company = companyId
        self.name = name
        self.number = number
        self.q = Q
        self.deviceName = name + '_[' + mac + ':' + number[0:2] + ':' + number[2:] + ']!'
        self.chName = "M1" + '_[' + mac + ':'
        self.chName2 = '_[' + mac + ':'
        print 'device name is:'
        print self.deviceName
        mac2 = mac.replace(":", "")
        self.mac = mac2.upper()
        self.address = 1
        self.debug = True
        self.mcu = mcu
        self.firstRun = True
        self.mqtt = mqtt
        self.nodes = Nodes
        #local dictionary of derived nodes ex: localNodes[tank_0199] = self
        self.localNodes = {}
        os.system("chmod 777 /root/reboot")
        os.system("echo nameserver 8.8.8.8 > /etc/resolv.conf")

    def sendtodbLoc(self, ch, channel, value, timestamp, deviceName, mac):
        #this will add your derived nodes to the master nodes list, allowing them to receive sets!!
        localNodesName = deviceName + "_" + str(ch) + "99"

        if not self.localNodes.has_key(localNodesName):
            self.localNodes[localNodesName] = True
            self.nodes[localNodesName] = self

        #make the techname
        lst = textwrap.wrap(str(mac), width=2)
        tech = ""
        for i in range(len(lst)):
            tech += lst[i].lower() + ":"

        chName2 = '_[' + tech

        if int(ch) < 10:
            ch = "0" + str(int(ch))

        if len(ch) > 2:
            ch = ch[:-2]

        dname = deviceName + chName2 + str(ch) + ":98]!"

        csplit = re.split(r"(.*?)_\[(.*?)\]", dname)
        nodeTypeName = csplit[1]
        uniqueID = csplit[2]
        company = "194"

        if int(timestamp) == 0:
            timestamp = self.getTime()

        topic = 'meshify/db/%s/%s/%s/%s/%s' % (company, "_", nodeTypeName, uniqueID.lower(), channel)
        print topic
        msg = """[{"value":"%s"}]""" % (str(value))
        print msg
        self.q.put([topic, msg, 0])

    def sendtodbDevJSON(self, ch, channel, value, timestamp, deviceName):
        if int(ch) < 10:
            ch = "0" + str(int(ch))
        dname = deviceName + self.chName2 + str(ch) + ":99]!"
        if int(timestamp) == 0:
            timestamp = self.getTime()

        csplit = re.split(r"(.*?)_\[(.*?)\]", dname)
        nodeTypeName = csplit[1]
        uniqueID = csplit[2]
        company = "194"

        topic = 'meshify/db/%s/%s/%s/%s/%s' % (company, "_", nodeTypeName, uniqueID.lower(), channel)
        print topic
        msg = """[{"value":%s}]""" % (str(value))
        print msg
        self.q.put([topic, msg, 0])

    def sendtodbLora(self, ch, channel, value, timestamp, deviceName):
        if ":" not in ch:
            ch = ch[0:2] + ":" + ch[2:4]

        #this will add your derived nodes to the master nodes list, allowing them to receive sets!!
        localNodesName = deviceName + "_" + str(ch).replace(':', "")

        if not self.localNodes.has_key(localNodesName):
            self.localNodes[localNodesName] = True
            self.nodes[localNodesName] = self

        dname = deviceName + self.chName2 + str(ch) + "]!"

        csplit = re.split(r"(.*?)_\[(.*?)\]", dname)
        nodeTypeName = csplit[1]
        uniqueID = csplit[2]
        company = "194"

        if int(timestamp) == 0:
            timestamp = self.getTime()

        topic = 'meshify/db/%s/%s/%s/%s/%s' % (company, "_", nodeTypeName, uniqueID.lower(), channel)
        print topic
        msg = """[{"value":"%s"}]""" % (str(value))
        print msg
        self.q.put([topic, msg, 0])

    def sendtodbDev(self, ch, channel, value, timestamp, deviceName):
        #this will add your derived nodes to the master nodes list, allowing them to receive sets!!
        localNodesName = deviceName + "_" + str(ch) + "99"

        if not self.localNodes.has_key(localNodesName):
            self.localNodes[localNodesName] = True
            self.nodes[localNodesName] = self

        if int(ch) < 10:
            ch = "0" + str(int(ch))

        dname = deviceName + self.chName2 + str(ch) + ":99]!"

        if int(timestamp) == 0:
            timestamp = self.getTime()

        csplit = re.split(r"(.*?)_\[(.*?)\]", dname)
        nodeTypeName = csplit[1]
        uniqueID = csplit[2]
        company = "194"

        topic = 'meshify/db/%s/%s/%s/%s/%s' % (company, "_", nodeTypeName, uniqueID.lower(), channel)
        print topic
        msg = """[{"value":"%s"}]""" % (str(value))
        print msg
        self.q.put([topic, msg, 0])

    def sendtodbCH(self, ch, channel, value, timestamp):
        if int(ch) < 10:
            ch = "0" + str(ch)

        dname = self.chName + str(ch) + ":99]!"

        if int(timestamp) == 0:
            timestamp = self.getTime()

        csplit = re.split(r"(.*?)_\[(.*?)\]", dname)
        nodeTypeName = csplit[1]
        uniqueID = csplit[2]
        company = "194"

        topic = 'meshify/db/%s/%s/%s/%s/%s' % (company, "_", nodeTypeName, uniqueID.lower(), channel)
        print topic
        msg = """[{"value":"%s"}]""" % (str(value))
        print msg
        self.q.put([topic, msg, 0])

    def sendtodb(self, channel, value, timestamp):
        if int(timestamp) == 0:
            timestamp = self.getTime()
        if timestamp < 1400499858:
            return
        else:
            timestamp = str(int(timestamp) + int(self.offset))

        csplit = re.split(r"(.*?)_\[(.*?)\]", self.deviceName)
        nodeTypeName = csplit[1]
        uniqueID = csplit[2]
        company = "194"

        topic = 'meshify/db/%s/%s/%s/%s/%s' % (company, "_", nodeTypeName, uniqueID.lower(), channel)
        print topic
        msg = """[{"value":"%s"}]""" % (str(value))
        print msg
        self.q.put([topic, msg, 0])

    def sendtodbJSON(self, channel, value, timestamp):
        if int(timestamp) == 0:
            timestamp = self.getTime()
        if timestamp < 1400499858:
            return
        else:
            timestamp = str(int(timestamp) + int(self.offset))

        csplit = re.split(r"(.*?)_\[(.*?)\]", self.deviceName)
        nodeTypeName = csplit[1]
        uniqueID = csplit[2]
        company = "194"

        topic = 'meshify/db/%s/%s/%s/%s/%s' % (company, "_", nodeTypeName, uniqueID.lower(), channel)
        print topic
        msg = """[{"value":%s}]""" % (str(value))
        print msg
        self.q.put([topic, msg, 0])

    def getTime(self):
        return str(int(time.time() + int(self.offset)))

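Note: every sendtodb* variant above builds the same topic shape, meshify/db/<companyId>/_/<nodeTypeName>/<uniqueId>/<channel>, with a one-element JSON array as the payload. The snippet below is a minimal, self-contained sketch of that construction, not part of the commit; the device name, channel, and value are invented examples.

# Illustrative only: rebuilds the topic/payload that deviceBase.sendtodb() publishes.
# The device name, channel, and value below are made-up examples.
import json
import re

deviceName = "tank_[b8:27:eb:3d:e9:11:01:99]!"   # hypothetical <nodeType>_[<uniqueId>]! name
channel = "level"
value = 42

csplit = re.split(r"(.*?)_\[(.*?)\]", deviceName)
nodeTypeName, uniqueID = csplit[1], csplit[2]
topic = 'meshify/db/%s/%s/%s/%s/%s' % ("194", "_", nodeTypeName, uniqueID.lower(), channel)
msg = json.dumps([{"value": str(value)}])        # equivalent to the hand-built """[{"value":"%s"}]""" string

print(topic)   # meshify/db/194/_/tank/b8:27:eb:3d:e9:11:01:99/level
print(msg)     # [{"value": "42"}]
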
RPi Mistaway Originals/main.py (new file, 1464 lines; diff suppressed because it is too large)
RPi Mistaway Originals/mainMeshify.py (new file, 154 lines)
@@ -0,0 +1,154 @@
import time
import os
try:
    import json
except:
    import simplejson as json
import thread
import threading
import re


class start(threading.Thread):

    def __init__(self, name=None, number=None, mac=None, Q=None, mcu=None, companyId=None, offset=None, mqtt=None, Nodes=None):
        threading.Thread.__init__(self)
        self.daemon = True
        self.offset = offset
        self.company = companyId
        self.name = name
        self.number = number
        self.q = Q
        self.deviceName = name + '_[' + mac + ':' + number[0:2] + ':' + number[2:] + ']!'
        print 'device name is:'
        print self.deviceName
        mac2 = mac.replace(":", "")
        self.mac = mac2.upper()
        self.version = "16" #mistification #added Nodes in v5
        self.finished = threading.Event()

        threading.Thread.start(self)

    #this is a required function for all drivers, its goal is to upload some piece of data
    #about your device so it can be seen on the web
    def register(self):
        #self.mainMistaway_hb('hb', 'On')
        self.sendtodb("connected", "true", 0)

    def stop(self):
        self.finished.set()
        self.join()

    def sendtodb(self, channel, value, timestamp):
        if int(timestamp) == 0:
            timestamp = self.getTime()
        if timestamp < 1400499858:
            return
        csplit = re.split(r"(.*?)_\[(.*?)\]", self.deviceName)
        nodeTypeName = csplit[1]
        uniqueID = csplit[2]
        company = "194"

        try:
            topic = 'meshify/db/%s/%s/%s/%s/%s' % (company, "_", nodeTypeName, uniqueID.lower(), channel)
            print topic
            if channel == "files":
                #for the file structure I had to take off the " " around the value
                msg = """[{"value":%s}]""" % (str(value))
            else:
                msg = """[{"value":"%s"}]""" % (str(value))
            print msg
            self.q.put([topic, msg, 0])
        except:
            print "didn't work to send up MQTT data"
    def run(self):
        #on startup send the version number
        self.sendtodb("version", str(self.version), 0)

        if os.path.isfile('/root/python_firmware/drivers/device_base.py'):
            print "found new device_base file"
            os.system("/bin/mv -f /root/python_firmware/drivers/device_base.py /root/python_firmware/device_base.py")
            os.system("/bin/rm -f /root/python_firmware/drivers/device_base.py")
            os.system("/bin/rm -f /root/python_firmware/drivers/device_base.pyc")

        if os.path.isfile('/root/python_firmware/drivers/meshifyData.py'):
            print "found new meshifyData file"
            os.system("/bin/mv -f /root/python_firmware/drivers/meshifyData.py /root/python_firmware/meshifyData/meshifyData.py")
            os.system("/bin/rm -f /root/python_firmware/drivers/meshifyData.py")
            os.system("/bin/rm -f /root/python_firmware/drivers/meshifyData.pyc")

        if os.path.isfile('/root/python_firmware/drivers/main.py'):
            print "found new main.py file"
            os.system("/bin/mv -f /root/python_firmware/drivers/main.py /root/python_firmware/main.py")
            os.system("/bin/rm -f /root/python_firmware/drivers/main.py")
            os.system("/bin/rm -f /root/python_firmware/drivers/main.pyc")
            time.sleep(0.5)
            os.system('/root/reboot')

        while True:
            try:
                self.mainMeshify_hb('hb', 'On')
                self.sendtodb("connected", "true", 0)
                time.sleep(3600 * 4)
            except Exception, e:
                print e

    def mainMeshify_files(self, name, value):
        name = 'files'

        dict = {}
        for dirname, dirnames, filenames in os.walk(str(value)):
            # print path to all subdirectories first.
            print "##########################################"
            print "new directory: " + dirname
            print "##########################################"
            # print path to all filenames.
            tempDictParent = {}
            for filename in filenames:
                tempDict = {}
                filepath = os.path.join(dirname, filename)
                try:
                    fileMem = os.stat(filepath).st_size
                    fileDate = os.stat(filepath).st_mtime
                except:
                    fileMem = ""
                    fileDate = ""
                print filepath, fileMem, fileDate
                tempDict["mem"] = fileMem
                tempDict["date"] = fileDate
                tempDictParent[filename] = tempDict

            dict[dirname] = tempDictParent

            # Advanced usage:
            # editing the 'dirnames' list will stop os.walk() from recursing into there.
            if '.git' in dirnames:
                # don't go into any .git directories.
                dirnames.remove('.git')

        value = json.dumps(dict)
        self.sendtodb(name, value, 0)
        return True

    def mainMeshify_hb(self, name, value):
        self.sendtodb(name, value, 0)

    def getTime(self):
        return str(int(time.time() + int(self.offset)))

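For reference, mainMeshify_files() above reports the directory walk on the "files" channel as nested JSON: a dict keyed by directory path, where each directory maps file names to their size ("mem") and modification time ("date"). A hypothetical payload of that shape, with paths, sizes, and timestamps invented for illustration, can be built like this:

# Hypothetical example of the structure mainMeshify_files() serializes onto the "files" channel.
# Directory names, sizes, and mtimes are invented for illustration.
import json

files_payload = {
    "/root/python_firmware": {
        "main.py":        {"mem": 48213, "date": 1678386541.0},
        "device_base.py": {"mem": 7150,  "date": 1678386541.0},
    },
    "/root/python_firmware/drivers": {},
}
print(json.dumps(files_payload, indent=2))
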
RPi Mistaway Originals/meshifyData.py (new file, 109 lines)
@@ -0,0 +1,109 @@
import urllib
try:
    import json
except:
    import simplejson as json
import pickle

MAC = "00409D53168A"


class meshifyData():

    def __init__(self, MAC):
        self.mac = MAC[0:6] + "FF-FF" + MAC[6:]
        print "here is the mac: " + self.mac
        #set the defaults
        self.param_dict = {}

    def checkConfig(self):
        url = "https://f5rrbd3r45.execute-api.us-east-1.amazonaws.com/device_config?mac=" + self.mac

        try:
            f = urllib.urlopen(url)
        except:
            print "Error opening url for remote config"
            #return the defaults
            return None

        try:
            s = f.read()
            #print s
            if len(s) < 5:
                return None
            s = s[1:-1].replace("'", '"')
            print s
            data = json.loads(s)
            #if we get here then replace the deviceList.txt
            with open('/root/python_firmware/deviceList.txt', 'w') as myfile:
                json.dump(data, myfile, indent=4)
            return data
        except Exception as e:
            print e
            #return the defaults
            return None

    def checkAPI(self):
        offset = -21600
        dst = False
        companyId = "1"

        url = "https://machines.meshify.com/api/gateway?macaddressForTimezone=" + self.mac

        try:
            f = urllib.urlopen(url)
        except:
            print "Error opening url"
            #return the defaults
            return offset, dst, companyId

        try:
            s = f.read()
            print s
            data = json.loads(s)
            offset = int(data["offset"])
            dst = bool(int(data["dst"]))
            print bool(int("0"))
            companyId = data["companyId"]
            return offset, dst, companyId
        except Exception, e:
            print e
            #return the defaults
            return -21600, False, "1"

    def getdata(self):
        #if the API fails and the company ID of 1 is returned then you need to
        #check and see if you have pickled anything.
        #if it doesn't fail, and it gives you something other than 1
        #then you need to repickle the object
        self.offset, self.dst, self.companyId = self.checkAPI()
        if self.companyId == "1":
            try:
                self.param_dict = pickle.load(open("params.p", "rb"))
            except:
                print self.offset, self.dst, self.companyId
                return self.offset, self.dst, self.companyId
            try:
                self.offset = self.param_dict["offset"]
                self.dst = self.param_dict["dst"]
                self.companyId = self.param_dict["companyId"]
            except:
                return -21600, False, "1"

            return self.offset, self.dst, self.companyId

        else:
            self.param_dict["offset"] = self.offset
            self.param_dict["dst"] = self.dst
            self.param_dict["companyId"] = self.companyId
            pickle.dump(self.param_dict, open("params.p", "wb"))
            print self.param_dict
            print self.offset, self.dst, self.companyId
            return self.offset, self.dst, self.companyId

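Usage note: getdata() above asks the Meshify gateway API first; when that call falls back to the default companyId of "1" it reads whatever was last pickled in params.p, and on a successful lookup it re-pickles the fresh values. A minimal usage sketch follows, written in Python 2 to match the module; the MAC is the module's own example constant and the printed values are illustrative, not taken from the commit.

# Minimal usage sketch (Python 2, matching the module above); not part of the commit.
from meshifyData import meshifyData

md = meshifyData("00409D53168A")     # example MAC constant from the module
offset, dst, companyId = md.getdata()
print offset, dst, companyId         # e.g. -21600 False 1 when both the API and the pickle fall back
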
meshifyDrivers/.DS_Store (binary, vendored; file contents not shown)
@@ -27,7 +27,7 @@ class start(threading.Thread):
        print self.deviceName
        mac2 = mac.replace(":", "")
        self.mac = mac2.upper()
        self.version = "17" #hp device management
        self.version = "16" #mistification #added Nodes in v5
        self.finished = threading.Event()

        threading.Thread.start(self)
@@ -56,7 +56,7 @@ class start(threading.Thread):

        try:
            topic = 'meshify/db/%s/%s/%s/%s/%s' % (company, "_", nodeTypeName, uniqueID, channel)
            topic = 'meshify/db/%s/%s/%s/%s/%s' % (company, "_", nodeTypeName, uniqueID.lower(), channel)
            print topic
            if channel == "files":
                #for the file structure I had to take off the " " around the value
@@ -71,6 +71,27 @@ class start(threading.Thread):
    def run(self):
        #on startup send the version number
        self.sendtodb("version", str(self.version), 0)

        if os.path.isfile('/root/python_firmware/drivers/device_base.py'):
            print "found new device_base file"
            os.system("/bin/mv -f /root/python_firmware/drivers/device_base.py /root/python_firmware/device_base.py")
            os.system("/bin/rm -f /root/python_firmware/drivers/device_base.py")
            os.system("/bin/rm -f /root/python_firmware/drivers/device_base.pyc")

        if os.path.isfile('/root/python_firmware/drivers/meshifyData.py'):
            print "found new meshifyData file"
            os.system("/bin/mv -f /root/python_firmware/drivers/meshifyData.py /root/python_firmware/meshifyData/meshifyData.py")
            os.system("/bin/rm -f /root/python_firmware/drivers/meshifyData.py")
            os.system("/bin/rm -f /root/python_firmware/drivers/meshifyData.pyc")

        if os.path.isfile('/root/python_firmware/drivers/main.py'):
            print "found new main.py file"
            os.system("/bin/mv -f /root/python_firmware/drivers/main.py /root/python_firmware/main.py")
            os.system("/bin/rm -f /root/python_firmware/drivers/main.py")
            os.system("/bin/rm -f /root/python_firmware/drivers/main.pyc")
            time.sleep(0.5)
            os.system('/root/reboot')

        while True:
            try:
                self.mainMeshify_hb('hb', 'On')
migration/__pycache__/lattice.cpython-311.pyc (new binary file; not shown)
migration/convertDynamoDB.ipynb (new file, 202 lines)
@@ -0,0 +1,202 @@
# In [10]:
import boto3, json

# In [44]:
def convertToNewConfig(config):
    #old form
    #"{'mainHP': 'https://hp-drivers.s3-us-west-2.amazonaws.com/mainMeshify/','m1': 'https://hp-drivers.s3-us-west-2.amazonaws.com/M1/','flowmonitor': 'https://s3.amazonaws.com/pocloud-drivers/flow-monitor/'}"

    #new form
    #"{'mainHP': 'https://hp-thingsboard.s3.amazonaws.com/mainHP/','m1': 'https://hp-thingsboard.s3.amazonaws.com/M1/','flowmonitor': 'https://hp-thingsboard.s3.amazonaws.com/flowmonitor/'}"
    mainHPMapping = {
        "mainMeshify": "https://hp-thingsboard.s3.amazonaws.com/mainHP/",
        "piflow": "https://hp-thingsboard.s3.amazonaws.com/mainHPRPI/",
        "plcfresh": "https://hp-thingsboard.s3.amazonaws.com/mainHPPLCFRESH/"
    }

    configMapping = {
        "abbflow": "https://hp-thingsboard.s3.amazonaws.com/abbflow/",
        "advvfdipp": "https://hp-thingsboard.s3.amazonaws.com/advvfdipp/",
        "dhsensor": "https://hp-thingsboard.s3.amazonaws.com/dhsensor/",
        "dual_flowmeter": "https://hp-thingsboard.s3.amazonaws.com/dual_flowmeter/",
        "flowmeterskid": "https://hp-thingsboard.s3.amazonaws.com/flowmeterskid/",
        "flowmonitor": "https://hp-thingsboard.s3.amazonaws.com/flowmonitor/",
        "PiFlow": "https://hp-thingsboard.s3.amazonaws.com/PiFlow/",
        "ipp": "https://hp-thingsboard.s3.amazonaws.com/ipp/",
        "plcpond": "https://hp-thingsboard.s3.amazonaws.com/plcpond/",
        "multisensor": "https://hp-thingsboard.s3.amazonaws.com/multisensor/",
        "dualactuator": "https://hp-thingsboard.s3.amazonaws.com/dualactuator/",
        "dualactuatorpri": "https://hp-thingsboard.s3.amazonaws.com/dualactuatorpri/",
        "plcfreshwater": "https://hp-thingsboard.s3.amazonaws.com/plcfreshwater/",
        "pondlevel": "https://hp-thingsboard.s3.amazonaws.com/pondlevel/",
        "promagmbs": "https://hp-thingsboard.s3.amazonaws.com/promagmbs/",
        "poc": "https://hp-thingsboard.s3.amazonaws.com/poc/",
        "recycle_train": "https://hp-thingsboard.s3.amazonaws.com/recycle_train/",
        "rigpump": "https://hp-thingsboard.s3.amazonaws.com/rigpump/",
        "submonitor": "https://hp-thingsboard.s3.amazonaws.com/submonitor/",
        "swdcontroller": "https://hp-thingsboard.s3.amazonaws.com/swdcontroller/",
        "tankalarms": "https://hp-thingsboard.s3.amazonaws.com/tankalarms/",
        "tanktransfer": "https://hp-thingsboard.s3.amazonaws.com/tanktransfer/",
        "tenflowmeterskid": "https://hp-thingsboard.s3.amazonaws.com/tenflowmeterskid/",
        "transferlite": "https://hp-thingsboard.s3.amazonaws.com/transferlite/",
        "m1": "https://hp-thingsboard.s3.amazonaws.com/m1/"
    }
    config = json.loads(config.replace("'", '"'))
    for x in dict.keys(config):
        if x == "mainHP":
            config[x] = mainHPMapping.get(config[x].split("/")[-2], "bad_request")
        else:
            config[x] = configMapping.get(x, "bad_request")
    config = json.dumps(config).replace('"', "'")
    print(config)

# In [4]:
client = boto3.client('dynamodb')
table = "HPDeviceList"
macs = ['C4:93:00:0C:68:F9']
for mac in macs:
    resp = client.get_item(Key={'mac': {'S': mac}}, TableName=table)
    oldconfig = resp["Item"]['config']
    print(oldconfig)
    urls = convertToNewConfig(oldconfig)
    newconfig = oldconfig.copy()
    newconfig['S'] = urls
    print(newconfig)
    client.update_item(
        TableName=table,
        Key={'mac': {'S': mac}},
        ExpressionAttributeNames={"#C": 'config'},
        ExpressionAttributeValues={':c': newconfig},
        ReturnValues='ALL_NEW',
        UpdateExpression='SET #C = :c'
    )

# In [ ]:
client.list_tables()

# In [6]:
client.describe_table(TableName=table)
# Out [6]:
# {'Table': {'AttributeDefinitions': [{'AttributeName': 'mac',
#    'AttributeType': 'S'}],
#   'TableName': 'HPDeviceList',
#   'KeySchema': [{'AttributeName': 'mac', 'KeyType': 'HASH'}],
#   'TableStatus': 'ACTIVE',
#   'CreationDateTime': datetime.datetime(2020, 6, 4, 13, 0, 51, 690000, tzinfo=tzlocal()),
#   'ProvisionedThroughput': {'NumberOfDecreasesToday': 0,
#    'ReadCapacityUnits': 5,
#    'WriteCapacityUnits': 5},
#   'TableSizeBytes': 100095,
#   'ItemCount': 495,
#   'TableArn': 'arn:aws:dynamodb:us-east-1:860246592755:table/HPDeviceList',
#   'TableId': 'fdb15ece-4feb-4dca-ae90-909c9d31ca7d',
#   'DeletionProtectionEnabled': False},
#  'ResponseMetadata': {'RequestId': 'KJIR35CPC8J544NID1MP1R45NJVV4KQNSO5AEMVJF66Q9ASUAAJG',
#   'HTTPStatusCode': 200,
#   'HTTPHeaders': {'server': 'Server',
#    'date': 'Thu, 09 Mar 2023 19:49:01 GMT',
#    'content-type': 'application/x-amz-json-1.0',
#    'content-length': '513',
#    'connection': 'keep-alive',
#    'x-amzn-requestid': 'KJIR35CPC8J544NID1MP1R45NJVV4KQNSO5AEMVJF66Q9ASUAAJG',
#    'x-amz-crc32': '2354334289'},
#   'RetryAttempts': 0}}

# In [24]:
m1 = client.get_item(Key={'mac': {'S': "C4:93:00:0C:68:F9"}}, TableName=table)["Item"]['config']['S']
piflow = client.get_item(Key={'mac': {'S': "B2:78:EB:E7:83:45"}}, TableName=table)["Item"]['config']['S']
plcfreshwater = client.get_item(Key={'mac': {'S': "00:00:05:00:00:08"}}, TableName=table)["Item"]['config']['S']

# In [45]:
convertToNewConfig(m1)
convertToNewConfig(piflow)
convertToNewConfig(plcfreshwater)
# Output:
# {'mainHP': 'https://hp-thingsboard.s3.amazonaws.com/mainHP/', 'm1': 'https://hp-thingsboard.s3.amazonaws.com/m1/', 'flowmonitor': 'https://hp-thingsboard.s3.amazonaws.com/flowmonitor/'}
# {'mainHP': 'https://hp-thingsboard.s3.amazonaws.com/mainHPRPI/', 'PiFlow': 'https://hp-thingsboard.s3.amazonaws.com/PiFlow/'}
# {'mainHP': 'https://hp-thingsboard.s3.amazonaws.com/mainHPPLCFRESH/', 'plcfreshwater': 'https://hp-thingsboard.s3.amazonaws.com/plcfreshwater/'}

# notebook metadata: kernel "aws" (python3), Python 3.11.0, nbformat 4.2
migration/convertVanity.ipynb (new file, 67 lines)
@@ -0,0 +1,67 @@
# In [ ]:
#Device Name form: Jones Holton FW #2
#MQTT clientID form: jones-holton-fw-2

# In [13]:
deviceName = "Banay WW 18 #4"
def convertVanityToClientId(deviceName):
    mqttClientId = []
    for c in deviceName:
        if c == " ":
            mqttClientId.append("-")
        elif c.isalnum():
            mqttClientId.append(c.lower())
        elif c == '"':
            mqttClientId.append("in")
        elif c == '-':
            mqttClientId.append(c)
    mqttClientId = "".join(mqttClientId)
    return mqttClientId
print(deviceName, convertVanityToClientId(deviceName))
# Output:
# Banay WW 18 #4 banay-ww-18-4

# notebook metadata: kernel "thingsboard" (python3), Python 3.10.5, nbformat 4.2
migration/lattice.py (new file, 419 lines)
@@ -0,0 +1,419 @@
"""Query Meshify for data."""
import json
import csv
from os import getenv
import getpass
import pickle
from pathlib import Path
import requests
import click


MESHIFY_BASE_URL = "https://194.p2121.net/api/" #getenv("MESHIFY_BASE_URL")
MESHIFY_USERNAME = "reportuser@henrypump.com" #getenv("MESHIFY_USERNAME")
MESHIFY_PASSWORD = "Kk8kMU2cc6vqVy" #getenv("MESHIFY_PASSWORD")
MESHIFY_AUTH = None


class NameNotFound(Exception):
    """Thrown when a name is not found in a list of stuff."""

    def __init__(self, message, name, list_of_stuff, *args):
        """Initialize the NameNotFound Exception."""
        self.message = message
        self.name = name
        self.list_of_stuff = list_of_stuff
        super(NameNotFound, self).__init__(message, name, list_of_stuff, *args)


def dict_filter(it, *keys):
    """Filter dictionary results."""
    for d in it:
        yield dict((k, d[k]) for k in keys)


def check_setup():
    """Check the global parameters."""
    global MESHIFY_USERNAME, MESHIFY_PASSWORD, MESHIFY_AUTH, MESHIFY_BASE_URL
    if not MESHIFY_USERNAME or not MESHIFY_PASSWORD:
        print("Simplify the usage by setting the meshify username and password as environment variables MESHIFY_USERNAME and MESHIFY_PASSWORD")
        MESHIFY_USERNAME = input("Meshify Username: ")
        MESHIFY_PASSWORD = getpass.getpass("Meshify Password: ")

    MESHIFY_AUTH = requests.auth.HTTPBasicAuth(MESHIFY_USERNAME, MESHIFY_PASSWORD)

    if not MESHIFY_BASE_URL:
        print("Simplify the usage by setting the environment variable MESHIFY_BASE_URL")
        MESHIFY_BASE_URL = input("Meshify Base URL: ")


def find_by_name(name, list_of_stuff):
    """Find an object in a list of stuff by its name parameter."""
    for x in list_of_stuff:
        if x['name'] == name:
            return x
    raise NameNotFound("Name not found!", name, list_of_stuff)

def GET(endpoint):
    """Make a query to the meshify API."""
    check_setup()
    if endpoint[0] == "/":
        endpoint = endpoint[1:]
    q_url = MESHIFY_BASE_URL + endpoint
    q_req = requests.get(q_url, auth=MESHIFY_AUTH)
    return json.loads(q_req.text) if q_req.status_code == 200 else []


def post_meshify_api(endpoint, data):
    """Post data to the meshify API."""
    check_setup()
    q_url = MESHIFY_BASE_URL + endpoint
    q_req = requests.post(q_url, data=json.dumps(data), auth=MESHIFY_AUTH)
    if q_req.status_code != 200:
        print(q_req.status_code)
    return json.loads(q_req.text) if q_req.status_code == 200 else []


def getNodeTypes():
    return GET("nodetypes")


def getNodes():
    return GET("nodes")


def getFolders():
    return GET("folders")


def getChannelValues(nodeId):
    return GET("data/current?nodeId={}".format(nodeId))


def getUsers():
    return GET("users")


def decode_channel_parameters(channel):
    """Decode a channel object's parameters into human-readable format."""
    channel_types = {
        1: 'device',
        5: 'static',
        6: 'user input',
        7: 'system'
    }

    io_options = {
        0: 'readonly',
        1: 'readwrite'
    }

    datatype_options = {
        1: "float",
        2: 'string',
        3: 'integer',
        4: 'boolean',
        5: 'datetime',
        6: 'timespan',
        7: 'file',
        8: 'latlng'
    }

    channel['channelType'] = channel_types[channel['channelType']]
    channel['io'] = io_options[channel['io']]
    channel['dataType'] = datatype_options[channel['dataType']]
    return channel


def encode_channel_parameters(channel):
    """Encode a channel object from human-readable format."""
    channel_types = {
        'device': 1,
        'static': 5,
        'user input': 6,
        'system': 7
    }

    io_options = {
        'readonly': False,
        'readwrite': True
    }

    datatype_options = {
        "float": 1,
        'string': 2,
        'integer': 3,
        'boolean': 4,
        'datetime': 5,
        'timespan': 6,
        'file': 7,
        'latlng': 8
    }
    try:
        channel['deviceTypeId'] = int(channel['deviceTypeId'])
        channel['fromMe'] = channel['fromMe'].lower() == 'true'
        channel['channelType'] = channel_types[channel['channelType'].lower()]
        channel['io'] = io_options[channel['io'].lower()]
        channel['dataType'] = datatype_options[channel['dataType'].lower()]
        # channel['id'] = 1
        return channel
    except KeyError as e:
        click.echo("Unable to convert channel {} due to bad key: {}".format(channel['name'], e))

def make_modbusmap_channel(i, chan, device_type_name):
    """Make a channel object for a row in the CSV."""
    json_obj = {
        "ah": "",
        "bytary": None,
        "al": "",
        "vn": chan['subTitle'],  # Name
        "ct": "number",  # ChangeType
        "le": "16",  # Length(16 or 32)
        "grp": str(chan['guaranteedReportPeriod']),  # GuaranteedReportPeriod
        "la": None,
        "chn": chan['name'],  # ChannelName
        "un": "1",  # DeviceNumber
        "dn": device_type_name,  # deviceName
        "vm": None,
        "lrt": "0",
        "da": "300",  # DeviceAddress
        "a": chan['helpExplanation'],  # TagName
        "c": str(chan['change']),  # Change
        "misc_u": str(chan['units']),  # Units
        "f": "1",  # FunctionCode
        "mrt": str(chan['minReportTime']),  # MinimumReportTime
        "m": "none",  # multiplier
        "m1ch": "2-{}".format(i),
        "mv": "0",  # MultiplierValue
        "s": "On",
        "r": "{}-{}".format(chan['min'], chan['max']),  # range
        "t": "int"  # type
    }
    return json_obj


def combine_modbusmap_and_channel(channel_obj, modbus_map):
    """Add the parameters from the modbus map to the channel object."""
    channel_part = modbus_map["1"]["addresses"]["300"]
    for c in channel_part:
        if channel_part[c]["chn"] == channel_obj['name']:
            channel_obj['units'] = channel_part[c]["misc_u"]
            try:
                min_max_range = channel_part[c]["r"].split("-")
                channel_obj['min'] = int(min_max_range[0])
                channel_obj['max'] = int(min_max_range[1])
            except Exception:
                channel_obj['min'] = None
                channel_obj['max'] = None

            channel_obj['change'] = float(channel_part[c]["c"])
            channel_obj['guaranteedReportPeriod'] = int(channel_part[c]["grp"])
            channel_obj['minReportTime'] = int(channel_part[c]["mrt"])
            return channel_obj
    return False


@click.group()
def cli():
    """Command Line Interface."""
    pass


@click.command()
@click.argument("device_type_name")
@click.option("-o", '--output-file', default=None, help="Where to put the CSV of channels.")
@click.option("-m", '--modbusmap-file', default="modbusMap.p", help="The location of the modbusMap.p file")
def get_channel_csv(device_type_name, output_file, modbusmap_file):
    """Query the meshify API and create a CSV of the current channels."""
    channel_fieldnames = [
        'id',
        'name',
        'deviceTypeId',
        'fromMe',
        'io',
        'subTitle',
        'helpExplanation',
        'channelType',
        'dataType',
        'defaultValue',
        'regex',
        'regexErrMsg',
        'units',
        'min',
        'max',
        'change',
        'guaranteedReportPeriod',
        'minReportTime'
    ]
    devicetypes = GET('devicetypes')
    this_devicetype = find_by_name(device_type_name, devicetypes)
    channels = GET('devicetypes/{}/channels'.format(this_devicetype['id']))
    modbus_map = None

    if Path(modbusmap_file).exists():
        with open(modbusmap_file, 'rb') as open_mbs_file:
            modbus_map = pickle.load(open_mbs_file)

    if not output_file:
        output_file = 'channels_{}.csv'.format(device_type_name)

    with open(output_file, 'w') as csvfile:
        writer = csv.DictWriter(csvfile, fieldnames=channel_fieldnames)

        writer.writeheader()
        for ch in channels:
            if not modbus_map:
                ch['units'] = None
                ch['min'] = None
                ch['max'] = None
                ch['change'] = None
                ch['guaranteedReportPeriod'] = None
                ch['minReportTime'] = None
            else:
                combined = combine_modbusmap_and_channel(ch, modbus_map)
                if combined:
                    ch = combined
            writer.writerow(decode_channel_parameters(ch))

    click.echo("Wrote channels to {}".format(output_file))

@click.command()
@click.argument("device_type_name")
@click.argument("csv_file")
def post_channel_csv(device_type_name, csv_file):
    """Post values from a CSV to Meshify Channel API."""
    devicetypes = GET('devicetypes')
    this_devicetype = find_by_name(device_type_name, devicetypes)

    with open(csv_file, 'r') as inp_file:
        reader = csv.DictReader(inp_file)
        for row in dict_filter(reader, 'name',
                               'deviceTypeId',
                               'fromMe',
                               'io',
                               'subTitle',
                               'helpExplanation',
                               'channelType',
                               'dataType',
                               'defaultValue',
                               'regex',
                               'regexErrMsg'):
            # print(row)
            # print(encode_channel_parameters(row))
            # click.echo(json.dumps(encode_channel_parameters(row), indent=4))
            if post_meshify_api('devicetypes/{}/channels'.format(this_devicetype['id']), encode_channel_parameters(row)):
                click.echo("Successfully added channel {}".format(row['name']))
            else:
                click.echo("Unable to add channel {}".format(row['name']))


@click.command()
def print_channel_options():
    """Print channel options for use with the csv files."""
    channel_types = ['device', 'static', 'user input', 'system']
    io_options = ['readonly', 'readwrite']
    datatype_options = [
        "float",
        'string',
        'integer',
        'boolean',
        'datetime',
        'timespan',
        'file',
        'latlng'
    ]

    click.echo("\n\nchannelType options")
    click.echo("===================")
    for chan in channel_types:
        click.echo(chan)

    click.echo("\n\nio options")
    click.echo("==========")
    for i in io_options:
        click.echo(i)

    click.echo("\n\ndataType options")
    click.echo("================")
    for d in datatype_options:
        click.echo(d)


@click.command()
@click.argument("device_type_name")
@click.argument("csv_file")
def create_modbusMap(device_type_name, csv_file):
    """Create modbusMap.p from channel csv file."""
    modbusMap = {
        "1": {
            "c": "ETHERNET/IP",
            "b": "192.168.1.10",
            "addresses": {
                "300": {}
            },
            "f": "Off",
            "p": "",
            "s": "1"
        },
        "2": {
            "c": "M1-485",
            "b": "9600",
            "addresses": {},
            "f": "Off",
            "p": "None",
            "s": "1"
        }
    }
    ind = 1
    with open(csv_file, 'r') as inp_file:
        reader = csv.DictReader(inp_file)
        for row in reader:
            modbusMap["1"]["addresses"]["300"]["2-{}".format(ind)] = make_modbusmap_channel(ind, row, device_type_name)
            ind += 1
    with open("modbusMap.p", 'wb') as mod_map_file:
        pickle.dump(modbusMap, mod_map_file, protocol=0)

    with open("modbusMap.json", 'w') as json_file:
        json.dump(modbusMap, json_file, indent=4)


@click.command()
@click.option("-i", "--input-file", default="modbusMap.p", help="The modbus map pickle file to convert.")
@click.option("-o", "--output", default="modbusMap.json", help="The modbus map json file output filename.")
def pickle_to_json(input_file, output):
    """Convert a pickle file to a json file."""
    if not Path(input_file).exists():
        click.echo("Pickle file {} does not exist".format(input_file))
        return

    with open(input_file, 'rb') as picklefile:
        input_contents = pickle.load(picklefile)

    with open(output, 'w') as outfile:
        json.dump(input_contents, outfile, indent=4)
    click.echo("Wrote from {} to {}.".format(input_file, output))


@click.command()
@click.option("-i", "--input-file", default="modbusMap.json", help="The modbus map json file to convert.")
@click.option("-o", "--output", default="modbusMap.p", help="The modbus map pickle file output filename.")
def json_to_pickle(input_file, output):
    """Convert a json file to a pickle file."""
    if not Path(input_file).exists():
        click.echo("JSON file {} does not exist".format(input_file))
        return

    with open(input_file, 'rb') as json_file:
        input_contents = json.load(json_file)

    with open(output, 'wb') as outfile:
        pickle.dump(input_contents, outfile, protocol=0)
    click.echo("Wrote from {} to {}.".format(input_file, output))


cli.add_command(get_channel_csv)
cli.add_command(post_channel_csv)
cli.add_command(print_channel_options)
cli.add_command(create_modbusMap)
cli.add_command(pickle_to_json)
cli.add_command(json_to_pickle)

if __name__ == '__main__':
    cli()

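lattice.py is a click-based CLI, so the commands registered above can be run from a shell (python lattice.py <command> ...) or, as sketched below, through click's test runner. The hyphenated command names are an assumption that holds for recent click releases, which turn get_channel_csv into get-channel-csv; older releases keep the underscore form. Both calls talk to the Meshify API, so MESHIFY_USERNAME/MESHIFY_PASSWORD (or the interactive prompt) must supply credentials, and the device type name tankalarms is borrowed from the migration notebook.

# Illustrative invocation of the lattice.py CLI via click's test runner; command names
# assume a click version that converts underscores to dashes.
from click.testing import CliRunner
import lattice

runner = CliRunner()

result = runner.invoke(lattice.cli, ["print-channel-options"])
print(result.output)

result = runner.invoke(lattice.cli, ["get-channel-csv", "tankalarms", "-o", "channels_tankalarms.csv"])
print(result.exit_code, result.output)
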
migration/migration.ipynb (new file, 248 lines)
@@ -0,0 +1,248 @@
# In [8]:
#migration script to rule them all
import boto3, json, lattice

# In [2]:
def convertToNewConfig(config):
    #old form
    #"{'mainHP': 'https://hp-drivers.s3-us-west-2.amazonaws.com/mainMeshify/','m1': 'https://hp-drivers.s3-us-west-2.amazonaws.com/M1/','flowmonitor': 'https://s3.amazonaws.com/pocloud-drivers/flow-monitor/'}"

    #new form
    #"{'mainHP': 'https://hp-thingsboard.s3.amazonaws.com/mainHP/','m1': 'https://hp-thingsboard.s3.amazonaws.com/M1/','flowmonitor': 'https://hp-thingsboard.s3.amazonaws.com/flowmonitor/'}"
    mainHPMapping = {
        "mainMeshify": "https://hp-thingsboard.s3.amazonaws.com/mainHP/",
        "piflow": "https://hp-thingsboard.s3.amazonaws.com/mainHPRPI/",
        "plcfresh": "https://hp-thingsboard.s3.amazonaws.com/mainHPPLCFRESH/"
    }

    configMapping = {
        "abbflow": "https://hp-thingsboard.s3.amazonaws.com/abbflow/",
        "advvfdipp": "https://hp-thingsboard.s3.amazonaws.com/advvfdipp/",
        "dhsensor": "https://hp-thingsboard.s3.amazonaws.com/dhsensor/",
        "dual_flowmeter": "https://hp-thingsboard.s3.amazonaws.com/dual_flowmeter/",
        "flowmeterskid": "https://hp-thingsboard.s3.amazonaws.com/flowmeterskid/",
        "flowmonitor": "https://hp-thingsboard.s3.amazonaws.com/flowmonitor/",
        "PiFlow": "https://hp-thingsboard.s3.amazonaws.com/PiFlow/",
        "ipp": "https://hp-thingsboard.s3.amazonaws.com/ipp/",
        "plcpond": "https://hp-thingsboard.s3.amazonaws.com/plcpond/",
        "multisensor": "https://hp-thingsboard.s3.amazonaws.com/multisensor/",
        "dualactuator": "https://hp-thingsboard.s3.amazonaws.com/dualactuator/",
        "dualactuatorpri": "https://hp-thingsboard.s3.amazonaws.com/dualactuatorpri/",
        "plcfreshwater": "https://hp-thingsboard.s3.amazonaws.com/plcfreshwater/",
        "pondlevel": "https://hp-thingsboard.s3.amazonaws.com/pondlevel/",
        "promagmbs": "https://hp-thingsboard.s3.amazonaws.com/promagmbs/",
        "poc": "https://hp-thingsboard.s3.amazonaws.com/poc/",
        "recycle_train": "https://hp-thingsboard.s3.amazonaws.com/recycle_train/",
        "rigpump": "https://hp-thingsboard.s3.amazonaws.com/rigpump/",
        "submonitor": "https://hp-thingsboard.s3.amazonaws.com/submonitor/",
        "swdcontroller": "https://hp-thingsboard.s3.amazonaws.com/swdcontroller/",
        "tankalarms": "https://hp-thingsboard.s3.amazonaws.com/tankalarms/",
        "tanktransfer": "https://hp-thingsboard.s3.amazonaws.com/tanktransfer/",
        "tenflowmeterskid": "https://hp-thingsboard.s3.amazonaws.com/tenflowmeterskid/",
        "transferlite": "https://hp-thingsboard.s3.amazonaws.com/transferlite/",
        "m1": "https://hp-thingsboard.s3.amazonaws.com/m1/"
    }
    config = json.loads(config.replace("'", '"'))
    for x in dict.keys(config):
        if x == "mainHP":
            config[x] = mainHPMapping.get(config[x].split("/")[-2], "bad_request")
        else:
            config[x] = configMapping.get(x, "bad_request")
    config = json.dumps(config).replace('"', "'")
    print(config)

# In [ ]:
def updateConfig(mac, client, table):
    resp = client.get_item(Key={'mac': {'S': mac}}, TableName=table)
    oldconfig = resp["Item"]['config']
    print(oldconfig)
    urls = convertToNewConfig(oldconfig)
    newconfig = oldconfig.copy()
    newconfig['S'] = urls
    print(newconfig)
    client.update_item(
        TableName=table,
        Key={'mac': {'S': mac}},
        ExpressionAttributeNames={"#C": 'config'},
        ExpressionAttributeValues={':c': newconfig},
        ReturnValues='ALL_NEW',
        UpdateExpression='SET #C = :c'
    )

# In [ ]:
client = boto3.client('dynamodb')
table = "HPDeviceList"
macs = ['C4:93:00:0C:68:F9']
for mac in macs:
    updateConfig(mac, client, table)

# In [14]:
def convertVanityToClientId(deviceName):
    #Device Name form: Jones Holton FW #2
    #MQTT clientID form: jones-holton-fw-2
    mqttClientId = []
    for c in deviceName:
        if c == " ":
            mqttClientId.append("-")
        elif c.isalnum():
            mqttClientId.append(c.lower())
        elif c == '"':
            mqttClientId.append("in")
        elif c == '-':
            mqttClientId.append(c)
    mqttClientId = "".join(mqttClientId)
    return mqttClientId

# In [5]:
nodeTypes = {
    "tankalarms": 95
}

# In [12]:
#Get nodes and filter by node type
nodes = lattice.getNodes()
folders = lattice.getFolders()
# Output:
# [
#   {
#     "id": 73138,
#     "parentNodeId": 73138,
#     "aliasNodeIds": [],
#     "folderId": 35662,
#     "nodeTypeId": 95,
#     "uniqueId": "b8:27:eb:3d:e9:11:01:99",
#     "vanity": "Fasken AW Battery Tank Alarms",
#     "tags": [],
#     "location": null,
#     "metadata": {},
#     "isActive": true,
#     "archiveNumber": 0,
#     "createdAt": "2021-08-19T17:54:44Z",
#     "updatedAt": "2021-08-19T17:54:44Z",
#     "folderName": "Fasken AW Battery"
#   },
#   {
#     "id": 79215,
#     "parentNodeId": 79215,
#     "aliasNodeIds": [],
#     "folderId": 36624,
#     "nodeTypeId": 95,
#     "uniqueId": "b8:27:eb:26:55:c0:01:99",
#     "vanity": "Fee BM Tank Alarms",
#     "tags": [],
#     "location": null,
#     "metadata": {},
#     "isActive": true,
#     "archiveNumber": 0,
#     "createdAt": "2022-03-17T23:01:35Z",
#     "updatedAt": "2022-03-17T23:01:35Z",
#     "folderName": "Fee BM"
#   }
# ]

# In [15]:
desiredNodes = []
desiredType = nodeTypes["tankalarms"]
for node in nodes:
    if node["nodeTypeId"] == desiredType:
        for folder in folders:
            if folder["id"] == node["folderId"]:
                node["folderName"] = folder["name"]
                break
        desiredNodes.append(node)

#print(json.dumps(desiredNodes, indent=2))
for n in desiredNodes:
    print(n["folderName"], convertVanityToClientId(n["folderName"]))
# Output:
# Fasken AW Battery fasken-aw-battery
# Fee BM fee-bm

# notebook metadata: kernel "aws" (python3), Python 3.11.0, nbformat 4.2