From 8902536098b179103377c2f90a34fd5c6a51e7e3 Mon Sep 17 00:00:00 2001
From: Nico Melone
Date: Tue, 12 Nov 2024 08:39:04 -0600
Subject: [PATCH] added transferlite and updates to rigpump

---
 .DS_Store                                    |  Bin 18436 -> 18436 bytes
 Pub_Sub/.DS_Store                            |  Bin 38916 -> 40964 bytes
 .../thingsboard/ek_facility_measures.csv     |   29 +-
 .../thingsboard/flowmeterskid_measures.csv   |   16 +
 .../thingsboard/advvfdipp_ma_tb.cfg          | 1456 +++++++++++++++++
 Pub_Sub/rigpump/.DS_Store                    |  Bin 10244 -> 10244 bytes
 Pub_Sub/rigpump/thingsboard/pub/sendData.py  |    2 -
 .../transferlite/thingsboard/pub/sendData.py |   89 +
 .../thingsboard/sub/receiveCommand.py        |  135 ++
 device.csv                                   |    6 +
 devices.csv                                  |    6 +
 11 files changed, 1731 insertions(+), 8 deletions(-)
 create mode 100644 Pub_Sub/flowmeterskid/thingsboard/flowmeterskid_measures.csv
 create mode 100644 Pub_Sub/oradvvfdipp/thingsboard/advvfdipp_ma_tb.cfg
 create mode 100644 Pub_Sub/transferlite/thingsboard/pub/sendData.py
 create mode 100644 Pub_Sub/transferlite/thingsboard/sub/receiveCommand.py
 create mode 100644 device.csv
 create mode 100644 devices.csv

diff --git a/.DS_Store b/.DS_Store
index e5a2b094021668c3623d9b804284f0d1059e4e0d..fbb6dc9aae5d32dda1690f756809d7f34e481800 100644
GIT binary patch
delta 1331
[... base85-encoded binary delta data ...]

diff --git a/Pub_Sub/.DS_Store b/Pub_Sub/.DS_Store
index 6f7593a3051c091dd907e598895e51d79650325c..ed6354254cc3bff0aa01751e488e4ef818b343a2 100644
GIT binary patch
delta 2460
[... base85-encoded binary delta data ...]
zOR!{`f;lC!3a$>uezjaHt5zfB6Hd*MB`=Z}OI*GuBe`5$tLRGTUb>*VjJu^CsHG)%79XMEmgwPva=YIYk=;{S0rW zZ&G4kDbn?_Hjjml_7qU4#{rT4+~ 30:\n self.removeDay(day=days[0])\n days = list(self.runs.keys())\n days.sort()\n\n def addHertzDataPoint(self, frequency):\n if frequency > 0:\n self.manageTime()\n try:\n self.runs[self.todayString][\"run_\" + str(self.currentRun)][\"frequencies\"].append(frequency)\n except:\n self.runs[self.todayString][\"run_\" + str(self.currentRun)][\"frequencies\"] = [frequency]\n\n def startRun(self):\n if self.checkRunning():\n self.endRun()\n self.runs[self.todayString][\"run_\" + str(self.currentRun)][\"start\"] = time.time()\n\n def endRun(self):\n self.runs[self.todayString][\"run_\" + str(self.currentRun)][\"end\"] = time.time()\n self.currentRun += 1\n self.runs[self.todayString][\"run_\" + str(self.currentRun)] = {\"start\":0, \"end\": 0, \"frequencies\":[]} \n\n def checkRunning(self):\n if self.runs[self.todayString][\"run_\" + str(self.currentRun)][\"start\"] and not self.runs[self.todayString][\"run_\" + str(self.currentRun)][\"end\"]:\n return True\n return False\n\n def addDay(self):\n self.today = dt.today()\n self.todayString = dt.strftime(self.today, \"%Y-%m-%d\")\n self.currentRun = 1\n self.runs[self.todayString] = {}\n self.runs[self.todayString][\"run_\" + str(self.currentRun)] = {\"start\":0, \"end\": 0, \"frequencies\":[]}\n\n def countRunsDay(self, day=None):\n if not day:\n day = self.todayString\n return len(self.runs[day].keys())\n\n def countRunsMultiDay(self, numDays=30):\n total_runs = 0\n for day in list(self.runs.keys()):\n total_runs += self.countRunsDay(day=day)\n return total_runs\n\n def calculateAverageHertzDay(self, day=None, returnArray=False):\n dayFrequencies = []\n if not day:\n day = self.todayString\n for run in list(self.runs[day].keys()):\n try:\n dayFrequencies += self.runs[day][run][\"frequencies\"]\n except Exception as e:\n print(\"{} missing frequency data for {}\".format(day,run))\n if returnArray:\n return dayFrequencies\n return round(math.fsum(dayFrequencies)/len(dayFrequencies),2)\n\n def calculateAverageHertzMultiDay(self, numDays=30):\n self.manageTime()\n frequencies = []\n for day in list(self.runs.keys()):\n if not day == self.todayString and (dt.strptime(self.todayString, \"%Y-%m-%d\") - dt.strptime(day, \"%Y-%m-%d\")).days <= numDays:\n try:\n frequencies += self.calculateAverageHertzDay(day=day, returnArray=True)\n except Exception as e:\n print(\"{} missing frequency data\".format(day))\n if len(frequencies):\n return round(math.fsum(frequencies)/len(frequencies), 2)\n return 0\n \n def calculateRunTimeDay(self, day=None, convertToHours=True):\n total_time = 0\n if not day:\n day = self.todayString\n for run in list(self.runs[day].keys()):\n total_time = self.runs[day][run][\"end\"] - self.runs[day][run][\"start\"] + total_time\n if convertToHours:\n return self.convertSecondstoHours(total_time)\n return total_time\n\n def calculateRunTimeMultiDay(self, numDays=30, convertToHours=True):\n total_time = 0\n for day in list(self.runs.keys()):\n if not day == self.todayString and (dt.strptime(self.todayString, \"%Y-%m-%d\") - dt.strptime(day, \"%Y-%m-%d\")).days <= numDays:\n total_time += self.calculateRunTimeDay(day=day, convertToHours=False)\n if convertToHours:\n return self.convertSecondstoHours(total_time)\n return total_time\n \n def calculateRunPercentDay(self, day=None, precise=False):\n if not day:\n day = self.todayString\n if precise:\n return (self.calculateRunTimeDay(day=day)/24) * 100\n return 
round((self.calculateRunTimeDay(day=day)/24) * 100, 2)\n \n\n def calculateRunPercentMultiDay(self, numDays=30, precise=False):\n self.manageTime()\n if precise:\n return (self.calculateRunTimeMultiDay()/(24*numDays)) * 100\n return round((self.calculateRunTimeMultiDay()/(24*numDays)) * 100,2)\n\n def removeDay(self, day=None):\n if not day:\n raise Exception(\"Day can not be None\")\n print(\"removing day {}\".format(day))\n del self.runs[day]\n \n def convertSecondstoHours(self, seconds):\n return round(seconds / (60*60),2)\n\n def loadDataFromFile(self, filePath=\"/var/user/files/runtimestats.json\"):\n try:\n with open(filePath, \"r\") as f:\n temp = json.load(f)\n self.runs = temp[\"data\"]\n self.currentRun = temp[\"current_run\"]\n self.today = dt.strptime(temp[\"current_day\"], \"%Y-%m-%d\")\n self.todayString = temp[\"current_day\"]\n self.manageTime()\n except:\n print(\"Could not find file at {}\".format(filePath))\n print(\"creating file\")\n self.addDay()\n try:\n with open(filePath, \"w\") as f:\n d = {\n \"current_run\": self.currentRun,\n \"current_day\": self.todayString,\n \"data\": self.runs\n }\n json.dump(d, f, indent=4)\n except Exception as e:\n print(e)\n\n def saveDataToFile(self, filePath=\"/var/user/files/runtimestats.json\"):\n try:\n print(\"Saving Runs\")\n with open(filePath, \"w\") as f:\n d = {\n \"current_run\": self.currentRun,\n \"current_day\": self.todayString,\n \"data\": self.runs\n }\n json.dump(d, f, indent=4)\n except Exception as e:\n print(e)\n\nrts = RuntimeStats()\nrts.loadDataFromFile()\nrts.saveDataToFile()\n\ndef reboot(reason=\"Rebooting for config file update\"):\n #basic = Basic()\n logger.info(\"!\" * 10 + \"REBOOTING DEVICE\" + \"!\"*10)\n logger.info(reason)\n r = os.popen(\"kill -s SIGHUP `cat /var/run/python/supervisord.pid`\").read()\n logger.info(f\"REBOOT : {r}\")\n\ndef checkFileExist(filename):\n path = \"/var/user/files\"\n if not os.path.exists(path):\n logger.info(\"no folder making files folder in var/user\")\n os.makedirs(path)\n with open(path + \"/\" + filename, \"a\") as f:\n json.dump({}, f)\n if not os.path.exists(path + \"/\" + filename):\n logger.info(\"no creds file making creds file\")\n with open(path + \"/\" + filename, \"a\") as f:\n json.dump({}, f)\n\ndef convertDStoJSON(ds):\n j = dict()\n for x in ds:\n j[x[\"key\"]] = x[\"value\"]\n return j\n\ndef convertJSONtoDS(j):\n d = []\n for key in j.keys():\n d.append({\"key\": key, \"value\": j[key]})\n return d\n\ndef checkCredentialConfig():\n logger.info(\"CHECKING CONFIG\")\n cfgpath = \"/var/user/cfg/device_supervisor/device_supervisor.cfg\"\n credspath = \"/var/user/files/creds.json\"\n cfg = dict()\n with open(cfgpath, \"r\") as f:\n cfg = json.load(f)\n clouds = cfg.get(\"clouds\")\n logger.info(clouds)\n #if not configured then try to configure from stored values\n if clouds[0][\"args\"][\"clientId\"] == \"unknown\" or clouds[0][\"args\"][\"username\"] == \"unknown\" or not clouds[0][\"args\"][\"passwd\"] or clouds[0][\"args\"][\"passwd\"] == \"unknown\":\n checkFileExist(\"creds.json\")\n with open(credspath, \"r\") as c:\n creds = json.load(c)\n if creds:\n logger.info(\"updating config with stored data\")\n clouds[0][\"args\"][\"clientId\"] = creds[\"clientId\"]\n clouds[0][\"args\"][\"username\"] = creds[\"userName\"]\n clouds[0][\"args\"][\"passwd\"] = creds[\"password\"]\n cfg[\"clouds\"] = clouds\n cfg = checkParameterConfig(cfg)\n with open(cfgpath, \"w\", encoding='utf-8') as n:\n json.dump(cfg, n, indent=1, ensure_ascii=False)\n reboot()\n 
else:\n #assuming clouds is filled out, if data is different then assume someone typed in something new and store it, if creds is empty fill with clouds' data\n checkFileExist(\"creds.json\")\n with open(credspath, \"r\") as c:\n logger.info(\"updating stored file with new data\")\n cfg = checkParameterConfig(cfg)\n with open(cfgpath, \"w\", encoding='utf-8') as n:\n json.dump(cfg, n, indent=1, ensure_ascii=False)\n creds = json.load(c)\n if creds:\n if creds[\"clientId\"] != clouds[0][\"args\"][\"clientId\"]:\n creds[\"clientId\"] = clouds[0][\"args\"][\"clientId\"]\n if creds[\"userName\"] != clouds[0][\"args\"][\"username\"]:\n creds[\"userName\"] = clouds[0][\"args\"][\"username\"]\n if creds[\"password\"] != clouds[0][\"args\"][\"passwd\"]:\n creds[\"password\"] = clouds[0][\"args\"][\"passwd\"]\n else:\n creds[\"clientId\"] = clouds[0][\"args\"][\"clientId\"]\n creds[\"userName\"] = clouds[0][\"args\"][\"username\"]\n creds[\"password\"] = clouds[0][\"args\"][\"passwd\"]\n with open(credspath, \"w\") as cw:\n json.dump(creds,cw)\n\ndef checkParameterConfig(cfg):\n logger.info(\"Checking Parameters!!!!\")\n paramspath = \"/var/user/files/params.json\"\n cfgparams = convertDStoJSON(cfg.get(\"labels\"))\n #check stored values \n checkFileExist(\"params.json\")\n with open(paramspath, \"r\") as f:\n logger.info(\"Opened param storage file\")\n params = json.load(f)\n if params:\n if cfgparams != params:\n #go through each param\n #if not \"unknown\" and cfg and params aren't the same take from cfg likely updated manually\n #if key in cfg but not in params copy to params\n logger.info(\"equalizing params between cfg and stored\")\n for key in cfgparams.keys():\n try:\n if cfgparams[key] != params[key] and cfgparams[key] != \"unknown\":\n params[key] = cfgparams[key]\n except:\n params[key] = cfgparams[key]\n cfg[\"labels\"] = convertJSONtoDS(params)\n _set_global_args(convertJSONtoDS(params))\n with open(paramspath, \"w\") as p:\n json.dump(params, p)\n else:\n with open(paramspath, \"w\") as p:\n logger.info(\"initializing param file with params in memory\")\n json.dump(convertDStoJSON(get_params()), p)\n cfg[\"labels\"] = get_params()\n \n return cfg\n\n\n\nlwtData = {\n \"init\":False,\n \"client\": client.Client(client_id=str(uuid.uuid4()), clean_session=True, userdata=None, protocol=client.MQTTv311, transport=\"tcp\")\n}\ndef lwt(mac):\n try:\n #if not lwtData[\"connected\"]:\n if not lwtData[\"init\"]:\n logger.info(\"INITIALIZING LWT CLIENT\")\n lwtData[\"client\"].username_pw_set(username=\"admin\", password=\"columbus\")\n lwtData[\"client\"].will_set(\"meshify/db/194/_/mainHP/\" + mac + \":00:00/connected\",json.dumps({\"value\":False}))\n lwtData[\"init\"] = True\n logger.info(\"Connecting to MQTT Broker for LWT purposes!!!!!!!\")\n lwtData[\"client\"].connect(\"mq194.imistaway.net\",1883, 600)\n lwtData[\"client\"].publish(\"meshify/db/194/_/mainHP/\" + mac + \":00:00/connected\", json.dumps({\"value\":True}))\n except Exception as e:\n logger.error(\"LWT DID NOT DO THE THING\")\n logger.error(e)\n\ndef sendData(message):\n #logger.debug(message)\n mac = __topic__.split(\"/\")[-1] #':'.join(re.findall('..', '%012x' % uuid.getnode()))\n lwt(mac)\n checkCredentialConfig()\n for measure in message[\"measures\"]:\n try:\n logger.debug(measure)\n now = (round(dt.timestamp(dt.now())/600)*600)\n if abs(now - measure[\"timestamp\"]) > 7200:\n reboot(reason=\"Poll timestamp and actual timestamp out of sync. 
Actual: {} Poll: {}\".format(now,measure[\"timestamp\"]))\n if measure[\"name\"] in [\"wellstatus\",\"pidcontrolmode\",\"downholesensorstatus\",\"alarmflowrate\",\"alarmintakepressure\",\"alarmintaketemperature\",\"alarmtubingpressure\",\"alarmvfd\",\"alarmlockout\",\"alarmfluidlevel\",\"runpermissive\",\"startpermissive\",\"last_vfd_fault_code\",\"vfd_fault\"]:\n logger.debug(\"Converting DINT/BOOL to STRING\")\n value = convert_int(measure[\"name\"], measure[\"value\"])\n logger.debug(\"Converted {} to {}\".format(measure[\"value\"], value))\n publish(__topic__ + \":01:99/\" + measure[\"name\"], json.dumps({\"value\": value}), __qos__)\n else:\n publish(__topic__ + \":01:99/\" + measure[\"name\"], json.dumps({\"value\": measure[\"value\"]}), __qos__)\n \n if measure[\"name\"] == \"wellstatus\":\n if measure[\"value\"] == 0 and not rts.runs[rts.todayString][\"run_\" + str(rts.currentRun)][\"start\"]:\n rts.startRun()\n rts.saveDataToFile()\n elif measure[\"value\"] > 0 and rts.runs[rts.todayString][\"run_\" + str(rts.currentRun)][\"start\"] and not rts.runs[rts.todayString][\"run_\" + str(rts.currentRun)][\"end\"]:\n rts.endRun()\n rts.saveDataToFile()\n publish(__topic__ + \":01:99/\" + \"percentRunTime30Days\", json.dumps({\"value\": rts.calculateRunPercentMultiDay()}), __qos__)\n \n if measure[\"name\"] == \"vfdfrequency\":\n if measure[\"value\"] > 0:\n rts.addHertzDataPoint(measure[\"value\"])\n rts.saveDataToFile()\n publish(__topic__ + \":01:99/\" + \"avgFrequency30Days\", json.dumps({\"value\": rts.calculateAverageHertzMultiDay()}), __qos__)\n except Exception as e:\n logger.error(e)\n \n #publish(__topic__, json.dumps({measure[\"name\"]: measure[\"value\"]}), __qos__)\n\ndef convert_int(plc_tag, value):\n well_status_codes = {\n 0: \"Running\",\n 1: \"Pumped Off\",\n 2: \"Alarmed\",\n 3: \"Locked Out\",\n 4: \"Stopped\"\n }\n\n pid_control_codes = {\n 0: \"Flow\",\n 1: \"Fluid Level\",\n 2: \"Tubing Pressure\",\n 3: \"Manual\"\n }\n\n downhole_codes = {\n 0: \"OK\",\n 1: \"Connecting\",\n 2: \"Open Circuit\",\n 3: \"Shorted\",\n 4: \"Cannot Decode\"\n }\n\n permissive_codes = {\n 0: \"OK\",\n 1: \"Flow\",\n 2: \"Intake Pressure\",\n 3: \"Intake Temperature\",\n 4: \"Tubing Pressure\",\n 5: \"VFD\",\n 6: \"Fluid Level\",\n 7: \"Min. 
Downtime\"\n }\n\n alarm_codes = {\n 0: \"OK\",\n 1: \"Alarm\"\n }\n\n alarm_vfd_codes = {\n 0: \"OK\",\n 1: \"Locked Out\"\n }\n\n vfd_fault_codes = {\n 0: \"No Fault\",\n 2: \"Auxiliary Input\",\n 3: \"Power Loss\",\n 4: \"UnderVoltage\",\n 5: \"OverVoltage\",\n 7: \"Motor Overload\",\n 8: \"Heatsink OverTemp\",\n 9: \"Thermister OverTemp\",\n 10: \"Dynamic Brake OverTemp\",\n 12: \"Hardware OverCurrent\",\n 13: \"Ground Fault\",\n 14: \"Ground Warning\",\n 15: \"Load Loss\",\n 17: \"Input Phase Loss\",\n 18: \"Motor PTC Trip\",\n 19: \"Task Overrun\",\n 20: \"Torque Prove Speed Band\",\n 21: \"Output Phase Loss\",\n 24: \"Decel Inhibit\",\n 25: \"OverSpeed Limit\",\n 26: \"Brake Slipped\",\n 27: \"Torque Prove Conflict\",\n 28: \"TP Encls Confict\",\n 29: \"Analog In Loss\",\n 33: \"Auto Restarts Exhausted\",\n 35: \"IPM OverCurrent\",\n 36: \"SW OverCurrent\",\n 38: \"Phase U to Ground\",\n 39: \"Phase V to Ground\",\n 40: \"Phase W to Ground\",\n 41: \"Phase UV Short\",\n 42: \"Phase VW Short\",\n 43: \"Phase WU Short\",\n 44: \"Phase UNeg to Ground\",\n 45: \"Phase VNeg to Ground\",\n 46: \"Phase WNeg to Ground\",\n 48: \"System Defaulted\",\n 49: \"Drive Powerup\",\n 51: \"Clear Fault Queue\",\n 55: \"Control Board Overtemp\",\n 59: \"Invalid Code\",\n 61: \"Shear Pin 1\",\n 62: \"Shear Pin 2\",\n 64: \"Drive Overload\",\n 66: \"OW Torque Level\",\n 67: \"Pump Off\",\n 71: \"Port 1 Adapter\",\n 72: \"Port 2 Adapter\",\n 73: \"Port 3 Adapter\",\n 74: \"Port 4 Adapter\",\n 75: \"Port 5 Adapter\",\n 76: \"Port 6 Adapter\",\n 77: \"IR Volts Range\",\n 78: \"FluxAmps Ref Range\",\n 79: \"Excessive Load\",\n 80: \"AutoTune Aborted\",\n 81: \"Port 1 DPI Loss\",\n 82: \"Port 2 DPI Loss\",\n 83: \"Port 3 DPI Loss\",\n 84: \"Port 4 DPI Loss\",\n 85: \"Port 5 DPI Loss\",\n 86: \"Port 6 DPI Loss\",\n 87: \"IXo Voltage Range\",\n 91: \"Primary Velocity Feedback Loss\",\n 93: \"Hardware Enable Check\",\n 94: \"Alternate Velocity Feedback Loss\",\n 95: \"Auxiliary Velocity Feedback Loss\",\n 96: \"Position Feedback Loss\",\n 97: \"Auto Tach Switch\",\n 100: \"Parameter Checksum\",\n 101: \"Power Down NVS Blank\",\n 102: \"NVS Not Blank\",\n 103: \"Power Down NVS Incompatible\",\n 104: \"Power Board Checksum\",\n 106: \"Incompat MCB-PB\",\n 107: \"Replaced MCB-PB\",\n 108: \"Analog Calibration Checksum\",\n 110: \"Invalid Power Board Data\",\n 111: \"Power Board Invalid ID\",\n 112: \"Power Board App Min Version\",\n 113: \"Tracking DataError\",\n 115: \"Power Down Table Full\",\n 116: \"Power Down Entry Too Large\",\n 117: \"Power Down Data Checksum\",\n 118: \"Power Board Power Down Checksum\",\n 124: \"App ID Changed\",\n 125: \"Using Backup App\",\n 134: \"Start on Power Up\",\n 137: \"External Precharge Error\",\n 138: \"Precharge Open\",\n 141: \"Autotune Enc Angle\",\n 142: \"Autotune Speed Restricted\",\n 143: \"Autotune Current Regulator\",\n 144: \"Autotune Inertia\",\n 145: \"Autotune Travel\",\n 13035: \"Net IO Timeout\",\n 13037: \"Net IO Timeout\"\n\n }\n\n plc_tags = {\n \"wellstatus\": well_status_codes.get(value, \"Invalid Code\"),\n \"pidcontrolmode\": pid_control_codes.get(value, \"Invalid Code\"),\n \"downholesensorstatus\": downhole_codes.get(value, \"Invalid Code\"),\n \"alarmflowrate\": alarm_codes.get(value, \"Invalid Code\"),\n \"alarmintakepressure\": alarm_codes.get(value, \"Invalid Code\"),\n \"alarmintaketemperature\": alarm_codes.get(value, \"Invalid Code\"),\n \"alarmtubingpressure\": alarm_codes.get(value, \"Invalid Code\"),\n \"alarmvfd\": alarm_codes.get(value, 
\"Invalid Code\"),\n \"alarmlockout\": alarm_vfd_codes.get(value, \"Invalid Code\"),\n \"alarmfluidlevel\": alarm_codes.get(value, \"Invalid Code\"),\n \"runpermissive\": permissive_codes.get(value, \"Invalid Code\"),\n \"startpermissive\": permissive_codes.get(value, \"Invalid Code\"),\n \"last_vfd_fault_code\": vfd_fault_codes.get(value, \"Invalid Code\"),\n \"vfd_fault\": vfd_fault_codes.get(value, \"Invalid Code\")\n }\n\n return plc_tags.get(plc_tag, \"Invalid Tag\")\n\n ", + "msgType": 0, + "cloudName": "default" + }, + { + "qos": 1, + "funcName": "sendData", + "script": "import json, os, time, math, uuid\nfrom datetime import datetime as dt\nfrom common.Logger import logger\nfrom quickfaas.remotebus import publish\nfrom quickfaas.global_dict import get as get_params\nfrom quickfaas.global_dict import _set_global_args\n\nclass RuntimeStats:\n def __init__(self):\n self.runs = {}\n self.currentRun = 0\n self.today = \"\"\n self.todayString = \"\"\n\n def manageTime(self):\n if self.todayString != dt.strftime(dt.today(), \"%Y-%m-%d\"):\n if self.runs[self.todayString][\"run_\" + str(self.currentRun)][\"start\"] and not self.runs[self.todayString][\"run_\" + str(self.currentRun)][\"end\"]:\n self.runs[self.todayString][\"run_\" + str(self.currentRun)][\"end\"] = time.mktime(dt.strptime(self.todayString + \" 23:59:59\", \"%Y-%m-%d %H:%M:%S\").timetuple())\n self.addDay()\n self.today = dt.today()\n self.todayString = dt.strftime(self.today, \"%Y-%m-%d\")\n days = list(self.runs.keys())\n days.sort()\n while (dt.strptime(days[-1],\"%Y-%m-%d\") - dt.strptime(days[0], \"%Y-%m-%d\")).days > 30:\n self.removeDay(day=days[0])\n days = list(self.runs.keys())\n days.sort()\n\n def addHertzDataPoint(self, frequency):\n if frequency > 0:\n self.manageTime()\n try:\n self.runs[self.todayString][\"run_\" + str(self.currentRun)][\"frequencies\"].append(frequency)\n except:\n self.runs[self.todayString][\"run_\" + str(self.currentRun)][\"frequencies\"] = [frequency]\n\n def startRun(self):\n if self.checkRunning():\n self.endRun()\n self.runs[self.todayString][\"run_\" + str(self.currentRun)][\"start\"] = time.time()\n\n def endRun(self):\n self.runs[self.todayString][\"run_\" + str(self.currentRun)][\"end\"] = time.time()\n self.currentRun += 1\n self.runs[self.todayString][\"run_\" + str(self.currentRun)] = {\"start\":0, \"end\": 0, \"frequencies\":[]} \n\n def checkRunning(self):\n if self.runs[self.todayString][\"run_\" + str(self.currentRun)][\"start\"] and not self.runs[self.todayString][\"run_\" + str(self.currentRun)][\"end\"]:\n return True\n return False\n\n def addDay(self):\n self.today = dt.today()\n self.todayString = dt.strftime(self.today, \"%Y-%m-%d\")\n self.currentRun = 1\n self.runs[self.todayString] = {}\n self.runs[self.todayString][\"run_\" + str(self.currentRun)] = {\"start\":0, \"end\": 0, \"frequencies\":[]}\n\n def countRunsDay(self, day=None):\n if not day:\n day = self.todayString\n return len(self.runs[day].keys())\n\n def countRunsMultiDay(self, numDays=30):\n total_runs = 0\n for day in list(self.runs.keys()):\n total_runs += self.countRunsDay(day=day)\n return total_runs\n\n def calculateAverageHertzDay(self, day=None, returnArray=False):\n dayFrequencies = []\n if not day:\n day = self.todayString\n for run in list(self.runs[day].keys()):\n try:\n dayFrequencies += self.runs[day][run][\"frequencies\"]\n except Exception as e:\n print(\"{} missing frequency data for {}\".format(day,run))\n if returnArray:\n return dayFrequencies\n return 
round(math.fsum(dayFrequencies)/len(dayFrequencies),2)\n\n def calculateAverageHertzMultiDay(self, numDays=30):\n self.manageTime()\n frequencies = []\n for day in list(self.runs.keys()):\n if not day == self.todayString and (dt.strptime(self.todayString, \"%Y-%m-%d\") - dt.strptime(day, \"%Y-%m-%d\")).days <= numDays:\n try:\n frequencies += self.calculateAverageHertzDay(day=day, returnArray=True)\n except Exception as e:\n print(\"{} missing frequency data\".format(day))\n if len(frequencies):\n return round(math.fsum(frequencies)/len(frequencies), 2)\n return 0\n \n def calculateRunTimeDay(self, day=None, convertToHours=True):\n total_time = 0\n if not day:\n day = self.todayString\n for run in list(self.runs[day].keys()):\n total_time = self.runs[day][run][\"end\"] - self.runs[day][run][\"start\"] + total_time\n if convertToHours:\n return self.convertSecondstoHours(total_time)\n return total_time\n\n def calculateRunTimeMultiDay(self, numDays=30, convertToHours=True):\n total_time = 0\n for day in list(self.runs.keys()):\n if not day == self.todayString and (dt.strptime(self.todayString, \"%Y-%m-%d\") - dt.strptime(day, \"%Y-%m-%d\")).days <= numDays:\n total_time += self.calculateRunTimeDay(day=day, convertToHours=False)\n if convertToHours:\n return self.convertSecondstoHours(total_time)\n return total_time\n \n def calculateRunPercentDay(self, day=None, precise=False):\n if not day:\n day = self.todayString\n if precise:\n return (self.calculateRunTimeDay(day=day)/24) * 100\n return round((self.calculateRunTimeDay(day=day)/24) * 100, 2)\n \n\n def calculateRunPercentMultiDay(self, numDays=30, precise=False):\n self.manageTime()\n if precise:\n return (self.calculateRunTimeMultiDay()/(24*numDays)) * 100\n return round((self.calculateRunTimeMultiDay()/(24*numDays)) * 100,2)\n\n def removeDay(self, day=None):\n if not day:\n raise Exception(\"Day can not be None\")\n print(\"removing day {}\".format(day))\n del self.runs[day]\n \n def convertSecondstoHours(self, seconds):\n return round(seconds / (60*60),2)\n\n def loadDataFromFile(self, filePath=\"/var/user/files/runtimestats.json\"):\n try:\n with open(filePath, \"r\") as f:\n temp = json.load(f)\n self.runs = temp[\"data\"]\n self.currentRun = temp[\"current_run\"]\n self.today = dt.strptime(temp[\"current_day\"], \"%Y-%m-%d\")\n self.todayString = temp[\"current_day\"]\n self.manageTime()\n except:\n print(\"Could not find file at {}\".format(filePath))\n print(\"creating file\")\n self.addDay()\n try:\n with open(filePath, \"w\") as f:\n d = {\n \"current_run\": self.currentRun,\n \"current_day\": self.todayString,\n \"data\": self.runs\n }\n json.dump(d, f, indent=4)\n except Exception as e:\n print(e)\n\n def saveDataToFile(self, filePath=\"/var/user/files/runtimestats.json\"):\n try:\n print(\"Saving Runs\")\n with open(filePath, \"w\") as f:\n d = {\n \"current_run\": self.currentRun,\n \"current_day\": self.todayString,\n \"data\": self.runs\n }\n json.dump(d, f, indent=4)\n except Exception as e:\n print(e)\n\ntry:\n rts_path = \"/var/user/files/runtimestats_tb.json\"\n rts = RuntimeStats()\n rts.loadDataFromFile(filePath=rts_path)\n rts.saveDataToFile(filePath=rts_path)\nexcept Exception as e:\n logger.error(f\"Error in TB start RTS: {e}\" )\n\ndef reboot(reason=\"Rebooting for config file update\"):\n #basic = Basic()\n logger.info(\"!\" * 10 + \"REBOOTING DEVICE\" + \"!\"*10)\n logger.info(reason)\n r = os.popen(\"kill -s SIGHUP `cat /var/run/python/supervisord.pid`\").read()\n logger.info(f\"REBOOT : {r}\")\n\ndef 
checkFileExist(filename):\n path = \"/var/user/files\"\n if not os.path.exists(path):\n logger.debug(\"no folder making files folder in var/user\")\n os.makedirs(path)\n with open(path + \"/\" + filename, \"a\") as f:\n json.dump({}, f)\n if not os.path.exists(path + \"/\" + filename):\n logger.debug(\"no creds file making creds file\")\n with open(path + \"/\" + filename, \"a\") as f:\n json.dump({}, f)\n\ndef convertDStoJSON(ds):\n j = dict()\n for x in ds:\n j[x[\"key\"]] = x[\"value\"]\n return j\n\ndef convertJSONtoDS(j):\n d = []\n for key in j.keys():\n d.append({\"key\": key, \"value\": j[key]})\n return d\n\ndef checkCredentialConfig():\n logger.debug(\"CHECKING CONFIG\")\n cfgpath = \"/var/user/cfg/device_supervisor/device_supervisor.cfg\"\n credspath = \"/var/user/files/creds.json\"\n cfg = dict()\n with open(cfgpath, \"r\") as f:\n cfg = json.load(f)\n clouds = cfg.get(\"clouds\")\n logger.debug(clouds)\n #if not configured then try to configure from stored values\n if clouds[0][\"args\"][\"clientId\"] == \"unknown\" or clouds[0][\"args\"][\"username\"] == \"unknown\" or not clouds[0][\"args\"][\"passwd\"] or clouds[0][\"args\"][\"passwd\"] == \"unknown\":\n checkFileExist(\"creds.json\")\n with open(credspath, \"r\") as c:\n creds = json.load(c)\n if creds:\n logger.debug(\"updating config with stored data\")\n clouds[0][\"args\"][\"clientId\"] = creds[\"clientId\"]\n clouds[0][\"args\"][\"username\"] = creds[\"userName\"]\n clouds[0][\"args\"][\"passwd\"] = creds[\"password\"]\n cfg[\"clouds\"] = clouds\n cfg = checkParameterConfig(cfg)\n with open(cfgpath, \"w\", encoding='utf-8') as n:\n json.dump(cfg, n, indent=1, ensure_ascii=False)\n reboot()\n else:\n #assuming clouds is filled out, if data is different then assume someone typed in something new and store it, if creds is empty fill with clouds' data\n checkFileExist(\"creds.json\")\n with open(credspath, \"r\") as c:\n logger.debug(\"updating stored file with new data\")\n cfg = checkParameterConfig(cfg)\n with open(cfgpath, \"w\", encoding='utf-8') as n:\n json.dump(cfg, n, indent=1, ensure_ascii=False)\n creds = json.load(c)\n if creds:\n if creds[\"clientId\"] != clouds[0][\"args\"][\"clientId\"]:\n creds[\"clientId\"] = clouds[0][\"args\"][\"clientId\"]\n if creds[\"userName\"] != clouds[0][\"args\"][\"username\"]:\n creds[\"userName\"] = clouds[0][\"args\"][\"username\"]\n if creds[\"password\"] != clouds[0][\"args\"][\"passwd\"]:\n creds[\"password\"] = clouds[0][\"args\"][\"passwd\"]\n else:\n creds[\"clientId\"] = clouds[0][\"args\"][\"clientId\"]\n creds[\"userName\"] = clouds[0][\"args\"][\"username\"]\n creds[\"password\"] = clouds[0][\"args\"][\"passwd\"]\n with open(credspath, \"w\") as cw:\n json.dump(creds,cw)\n\ndef checkParameterConfig(cfg):\n logger.debug(\"Checking Parameters!!!!\")\n paramspath = \"/var/user/files/params.json\"\n cfgparams = convertDStoJSON(cfg.get(\"labels\"))\n #check stored values \n checkFileExist(\"params.json\")\n with open(paramspath, \"r\") as f:\n logger.debug(\"Opened param storage file\")\n params = json.load(f)\n if params:\n if cfgparams != params:\n #go through each param\n #if not \"unknown\" and cfg and params aren't the same take from cfg likely updated manually\n #if key in cfg but not in params copy to params\n logger.debug(\"equalizing params between cfg and stored\")\n for key in cfgparams.keys():\n try:\n if cfgparams[key] != params[key] and cfgparams[key] != \"unknown\":\n params[key] = cfgparams[key]\n except:\n params[key] = cfgparams[key]\n cfg[\"labels\"] = 
convertJSONtoDS(params)\n _set_global_args(convertJSONtoDS(params))\n with open(paramspath, \"w\") as p:\n json.dump(params, p)\n else:\n with open(paramspath, \"w\") as p:\n logger.debug(\"initializing param file with params in memory\")\n json.dump(convertDStoJSON(get_params()), p)\n cfg[\"labels\"] = get_params()\n \n return cfg\n\n# Helper function to split the payload into chunks\ndef chunk_payload(payload, chunk_size=20):\n chunked_values = list(payload[\"values\"].items())\n for i in range(0, len(chunked_values), chunk_size):\n yield {\n \"ts\": payload[\"ts\"],\n \"values\": dict(chunked_values[i:i+chunk_size])\n }\n\ndef sendData(message):\n #logger.debug(message)\n try:\n checkCredentialConfig()\n except Exception as e:\n logger.error(e)\n payload = {\"ts\": (round(dt.timestamp(dt.now())/600)*600)*1000, \"values\": {}}\n for measure in message[\"measures\"]:\n try:\n logger.debug(measure)\n if abs(payload[\"ts\"]/1000 - measure[\"timestamp\"]) > 3600:\n reboot(reason=\"Poll timestamp and actual timestamp out of sync. Actual: {} Poll: {}\".format(payload[\"ts\"]/1000,measure[\"timestamp\"]))\n if measure[\"name\"] in [\"wellstatus\",\"pidcontrolmode\",\"downholesensorstatus\",\"alarmflowrate\",\"alarmintakepressure\",\"alarmintaketemperature\",\"alarmtubingpressure\",\"alarmvfd\",\"alarmlockout\",\"alarmfluidlevel\",\"runpermissive\",\"startpermissive\",\"last_vfd_fault_code\",\"vfd_fault\", \"flowmeter_fault\"]:\n logger.debug(\"Converting DINT/BOOL to STRING\")\n value = convert_int(measure[\"name\"], measure[\"value\"])\n logger.debug(\"Converted {} to {}\".format(measure[\"value\"], value))\n payload[\"values\"][measure[\"name\"]] = value\n payload[\"values\"][measure[\"name\"] + \"_int\"] = measure[\"value\"]\n else:\n payload[\"values\"][measure[\"name\"]] = measure[\"value\"]\n if measure[\"name\"] == \"wellstatus\":\n if measure[\"value\"] == 0 and not rts.runs[rts.todayString][\"run_\" + str(rts.currentRun)][\"start\"]:\n rts.startRun()\n rts.saveDataToFile(filePath=rts_path)\n elif measure[\"value\"] > 0 and rts.runs[rts.todayString][\"run_\" + str(rts.currentRun)][\"start\"] and not rts.runs[rts.todayString][\"run_\" + str(rts.currentRun)][\"end\"]:\n rts.endRun()\n rts.saveDataToFile(filePath=rts_path)\n logger.info(f\"Adding {rts.calculateRunPercentMultiDay()} to payload as percentRunTime30Days\")\n payload[\"values\"][\"percentRunTime30Days\"] = rts.calculateRunPercentMultiDay()\n \n \n if measure[\"name\"] == \"vfdfrequency\":\n if measure[\"value\"] > 0:\n rts.addHertzDataPoint(measure[\"value\"])\n rts.saveDataToFile(filePath=rts_path)\n logger.info(f\"Adding {rts.calculateAverageHertzMultiDay()} to payload as avgFrequency30Days\")\n payload[\"values\"][\"avgFrequency30Days\"] = rts.calculateAverageHertzMultiDay()\n \n except Exception as e:\n logger.error(e)\n \n for chunk in chunk_payload(payload=payload):\n publish(__topic__, json.dumps(chunk), __qos__, cloud_name=\"ThingsBoard\")\n time.sleep(2)\n publish(\"v1/devices/me/attributes\", json.dumps({\"latestReportTime\": (round(dt.timestamp(dt.now())/600)*600)*1000}), __qos__, cloud_name=\"ThingsBoard\")\n\ndef convert_int(plc_tag, value):\n well_status_codes = {\n 0: \"Running\",\n 1: \"Pumped Off\",\n 2: \"Alarmed\",\n 3: \"Locked Out\",\n 4: \"Stopped\"\n }\n\n pid_control_codes = {\n 0: \"Flow\",\n 1: \"Fluid Level\",\n 2: \"Tubing Pressure\",\n 3: \"Manual\"\n }\n\n downhole_codes = {\n 0: \"OK\",\n 1: \"Connecting\",\n 2: \"Open Circuit\",\n 3: \"Shorted\",\n 4: \"Cannot Decode\"\n }\n\n permissive_codes = 
{\n 0: \"OK\",\n 1: \"Flow\",\n 2: \"Intake Pressure\",\n 3: \"Intake Temperature\",\n 4: \"Tubing Pressure\",\n 5: \"VFD\",\n 6: \"Fluid Level\",\n 7: \"Min. Downtime\"\n }\n\n alarm_codes = {\n 0: \"OK\",\n 1: \"Alarm\"\n }\n\n alarm_vfd_codes = {\n 0: \"OK\",\n 1: \"Locked Out\"\n }\n\n vfd_fault_codes = {\n 0: \"No Fault\",\n 2: \"Auxiliary Input\",\n 3: \"Power Loss\",\n 4: \"UnderVoltage\",\n 5: \"OverVoltage\",\n 7: \"Motor Overload\",\n 8: \"Heatsink OverTemp\",\n 9: \"Thermister OverTemp\",\n 10: \"Dynamic Brake OverTemp\",\n 12: \"Hardware OverCurrent\",\n 13: \"Ground Fault\",\n 14: \"Ground Warning\",\n 15: \"Load Loss\",\n 17: \"Input Phase Loss\",\n 18: \"Motor PTC Trip\",\n 19: \"Task Overrun\",\n 20: \"Torque Prove Speed Band\",\n 21: \"Output Phase Loss\",\n 24: \"Decel Inhibit\",\n 25: \"OverSpeed Limit\",\n 26: \"Brake Slipped\",\n 27: \"Torque Prove Conflict\",\n 28: \"TP Encls Confict\",\n 29: \"Analog In Loss\",\n 33: \"Auto Restarts Exhausted\",\n 35: \"IPM OverCurrent\",\n 36: \"SW OverCurrent\",\n 38: \"Phase U to Ground\",\n 39: \"Phase V to Ground\",\n 40: \"Phase W to Ground\",\n 41: \"Phase UV Short\",\n 42: \"Phase VW Short\",\n 43: \"Phase WU Short\",\n 44: \"Phase UNeg to Ground\",\n 45: \"Phase VNeg to Ground\",\n 46: \"Phase WNeg to Ground\",\n 48: \"System Defaulted\",\n 49: \"Drive Powerup\",\n 51: \"Clear Fault Queue\",\n 55: \"Control Board Overtemp\",\n 59: \"Invalid Code\",\n 61: \"Shear Pin 1\",\n 62: \"Shear Pin 2\",\n 64: \"Drive Overload\",\n 66: \"OW Torque Level\",\n 67: \"Pump Off\",\n 71: \"Port 1 Adapter\",\n 72: \"Port 2 Adapter\",\n 73: \"Port 3 Adapter\",\n 74: \"Port 4 Adapter\",\n 75: \"Port 5 Adapter\",\n 76: \"Port 6 Adapter\",\n 77: \"IR Volts Range\",\n 78: \"FluxAmps Ref Range\",\n 79: \"Excessive Load\",\n 80: \"AutoTune Aborted\",\n 81: \"Port 1 DPI Loss\",\n 82: \"Port 2 DPI Loss\",\n 83: \"Port 3 DPI Loss\",\n 84: \"Port 4 DPI Loss\",\n 85: \"Port 5 DPI Loss\",\n 86: \"Port 6 DPI Loss\",\n 87: \"IXo Voltage Range\",\n 91: \"Primary Velocity Feedback Loss\",\n 93: \"Hardware Enable Check\",\n 94: \"Alternate Velocity Feedback Loss\",\n 95: \"Auxiliary Velocity Feedback Loss\",\n 96: \"Position Feedback Loss\",\n 97: \"Auto Tach Switch\",\n 100: \"Parameter Checksum\",\n 101: \"Power Down NVS Blank\",\n 102: \"NVS Not Blank\",\n 103: \"Power Down NVS Incompatible\",\n 104: \"Power Board Checksum\",\n 106: \"Incompat MCB-PB\",\n 107: \"Replaced MCB-PB\",\n 108: \"Analog Calibration Checksum\",\n 110: \"Invalid Power Board Data\",\n 111: \"Power Board Invalid ID\",\n 112: \"Power Board App Min Version\",\n 113: \"Tracking DataError\",\n 115: \"Power Down Table Full\",\n 116: \"Power Down Entry Too Large\",\n 117: \"Power Down Data Checksum\",\n 118: \"Power Board Power Down Checksum\",\n 124: \"App ID Changed\",\n 125: \"Using Backup App\",\n 134: \"Start on Power Up\",\n 137: \"External Precharge Error\",\n 138: \"Precharge Open\",\n 141: \"Autotune Enc Angle\",\n 142: \"Autotune Speed Restricted\",\n 143: \"Autotune Current Regulator\",\n 144: \"Autotune Inertia\",\n 145: \"Autotune Travel\",\n 13035: \"Net IO Timeout\",\n 13037: \"Net IO Timeout\"\n\n }\n\n plc_tags = {\n \"wellstatus\": well_status_codes.get(value, \"Invalid Code\"),\n \"pidcontrolmode\": pid_control_codes.get(value, \"Invalid Code\"),\n \"downholesensorstatus\": downhole_codes.get(value, \"Invalid Code\"),\n \"alarmflowrate\": alarm_codes.get(value, \"Invalid Code\"),\n \"alarmintakepressure\": alarm_codes.get(value, \"Invalid Code\"),\n 
\"alarmintaketemperature\": alarm_codes.get(value, \"Invalid Code\"),\n \"alarmtubingpressure\": alarm_codes.get(value, \"Invalid Code\"),\n \"alarmvfd\": alarm_codes.get(value, \"Invalid Code\"),\n \"alarmlockout\": alarm_vfd_codes.get(value, \"Invalid Code\"),\n \"alarmfluidlevel\": alarm_codes.get(value, \"Invalid Code\"),\n \"runpermissive\": permissive_codes.get(value, \"Invalid Code\"),\n \"startpermissive\": permissive_codes.get(value, \"Invalid Code\"),\n \"last_vfd_fault_code\": vfd_fault_codes.get(value, \"Invalid Code\"),\n \"vfd_fault\": vfd_fault_codes.get(value, \"Invalid Code\"),\n \"flowmeter_fault\": alarm_codes.get(value, \"Invalid Code\")\n }\n\n return plc_tags.get(plc_tag, \"Invalid Tag\")\n\n ", + "name": "sendData", + "trigger": "measure_event", + "topic": "v1/devices/me/telemetry", + "msgType": 0, + "cloudName": "ThingsBoard", + "groups": [ + "default" + ] + } + ], + "downloadFuncs": [ + { + "name": "Commands", + "topic": "meshify/sets/194/${MAC_UPPER}:01:99", + "qos": 1, + "funcName": "receiveCommand", + "payload_type": "Plaintext", + "script": "# Enter your python code.\nimport json\nfrom quickfaas.measure import recall\nfrom common.Logger import logger\n\ndef sync(mac,value, wizard_api):\n #get new values and send\n try:\n data = recall()#json.loads(recall().decode(\"utf-8\"))\n except Exception as e:\n logger.error(e)\n logger.info(data)\n for controller in data:\n for measure in controller[\"measures\"]:\n #publish measure\n topic = \"meshify/db/194/_/advvfdipp/\" + mac + \"/\" + measure[\"name\"]\n if measure[\"name\"] in [\"wellstatus\",\"pidcontrolmode\",\"downholesensorstatus\",\"alarmflowrate\",\"alarmintakepressure\",\"alarmintaketemperature\",\"alarmtubingpressure\",\"alarmvfd\",\"alarmlockout\",\"alarmfluidlevel\",\"runpermissive\",\"startpermissive\",\"last_vfd_fault_code\",\"vfd_fault\"]:\n payload = [{\"value\": convert_int(measure[\"name\"], measure[\"value\"])}]\n else:\n payload = [{\"value\": measure[\"value\"]}]\n logger.debug(\"Sending on topic: {}\".format(topic))\n logger.debug(\"Sending value: {}\".format(payload))\n wizard_api.mqtt_publish(topic, json.dumps(payload))\ndef writeplctag(mac, value, wizard_api):\n try:\n value = json.loads(value.replace(\"'\",'\"'))\n logger.debug(value)\n message = {\"advvfdipp\":{value[\"tag\"]: value[\"val\"]}}\n wizard_api.write_plc_values(message)\n except Exception as e:\n logger.debug(e)\n \ndef receiveCommand(topic, payload, wizard_api):\n logger.debug(topic)\n logger.debug(json.loads(payload))\n p = json.loads(payload)[0]\n command = p[\"payload\"][\"name\"].split(\".\")[1]\n commands = {\n \"sync\": sync,\n \"writeplctag\": writeplctag,\n }\n commands[command](p[\"mac\"].lower(),p[\"payload\"][\"value\"], wizard_api)\n #logger.debug(command)\n ack(p[\"msgId\"], p[\"mac\"], command, p[\"payload\"][\"name\"].split(\".\")[1], p[\"payload\"][\"value\"], wizard_api)\n\ndef ack(msgid, mac, name, command, value, wizard_api):\n #logger.debug(mac)\n macsquish = \"\".join(mac.split(\":\")[:-2])\n maclower = \":\".join(mac.split(\":\")[:-2])\n maclower = maclower.lower()\n #logger.debug(msgid)\n #logger.debug(mac)\n #logger.debug(name)\n #logger.debug(value)\n wizard_api.mqtt_publish(\"meshify/responses/\" + str(msgid), json.dumps([{\"value\": \"{} Success Setting: {} To: {}\".format(macsquish,name, value), \"msgid\": str(msgid)}]))\n wizard_api.mqtt_publish(\"meshify/db/194/_/mainMeshify/\" + maclower + \":00:00/commands\", json.dumps([{\"value\": {\"status\": \"success\", \"value\": str(value), \"channel\": 
command}, \"msgid\": str(msgid)}]))\n\ndef convert_int(plc_tag, value):\n well_status_codes = {\n 0: \"Running\",\n 1: \"Pumped Off\",\n 2: \"Alarmed\",\n 3: \"Locked Out\",\n 4: \"Stopped\"\n }\n\n pid_control_codes = {\n 0: \"Flow\",\n 1: \"Fluid Level\",\n 2: \"Tubing Pressure\",\n 3: \"Manual\"\n }\n\n downhole_codes = {\n 0: \"OK\",\n 1: \"Connecting\",\n 2: \"Open Circuit\",\n 3: \"Shorted\",\n 4: \"Cannot Decode\"\n }\n\n permissive_codes = {\n 0: \"OK\",\n 1: \"Flow\",\n 2: \"Intake Pressure\",\n 3: \"Intake Temperature\",\n 4: \"Tubing Pressure\",\n 5: \"VFD\",\n 6: \"Fluid Level\",\n 7: \"Min. Downtime\"\n }\n\n alarm_codes = {\n 0: \"OK\",\n 1: \"Alarm\"\n }\n\n alarm_vfd_codes = {\n 0: \"OK\",\n 1: \"Locked Out\"\n }\n\n vfd_fault_codes = {\n 0: \"No Fault\",\n 2: \"Auxiliary Input\",\n 3: \"Power Loss\",\n 4: \"UnderVoltage\",\n 5: \"OverVoltage\",\n 7: \"Motor Overload\",\n 8: \"Heatsink OverTemp\",\n 9: \"Thermister OverTemp\",\n 10: \"Dynamic Brake OverTemp\",\n 12: \"Hardware OverCurrent\",\n 13: \"Ground Fault\",\n 14: \"Ground Warning\",\n 15: \"Load Loss\",\n 17: \"Input Phase Loss\",\n 18: \"Motor PTC Trip\",\n 19: \"Task Overrun\",\n 20: \"Torque Prove Speed Band\",\n 21: \"Output Phase Loss\",\n 24: \"Decel Inhibit\",\n 25: \"OverSpeed Limit\",\n 26: \"Brake Slipped\",\n 27: \"Torque Prove Conflict\",\n 28: \"TP Encls Confict\",\n 29: \"Analog In Loss\",\n 33: \"Auto Restarts Exhausted\",\n 35: \"IPM OverCurrent\",\n 36: \"SW OverCurrent\",\n 38: \"Phase U to Ground\",\n 39: \"Phase V to Ground\",\n 40: \"Phase W to Ground\",\n 41: \"Phase UV Short\",\n 42: \"Phase VW Short\",\n 43: \"Phase WU Short\",\n 44: \"Phase UNeg to Ground\",\n 45: \"Phase VNeg to Ground\",\n 46: \"Phase WNeg to Ground\",\n 48: \"System Defaulted\",\n 49: \"Drive Powerup\",\n 51: \"Clear Fault Queue\",\n 55: \"Control Board Overtemp\",\n 59: \"Invalid Code\",\n 61: \"Shear Pin 1\",\n 62: \"Shear Pin 2\",\n 64: \"Drive Overload\",\n 66: \"OW Torque Level\",\n 67: \"Pump Off\",\n 71: \"Port 1 Adapter\",\n 72: \"Port 2 Adapter\",\n 73: \"Port 3 Adapter\",\n 74: \"Port 4 Adapter\",\n 75: \"Port 5 Adapter\",\n 76: \"Port 6 Adapter\",\n 77: \"IR Volts Range\",\n 78: \"FluxAmps Ref Range\",\n 79: \"Excessive Load\",\n 80: \"AutoTune Aborted\",\n 81: \"Port 1 DPI Loss\",\n 82: \"Port 2 DPI Loss\",\n 83: \"Port 3 DPI Loss\",\n 84: \"Port 4 DPI Loss\",\n 85: \"Port 5 DPI Loss\",\n 86: \"Port 6 DPI Loss\",\n 87: \"IXo Voltage Range\",\n 91: \"Primary Velocity Feedback Loss\",\n 93: \"Hardware Enable Check\",\n 94: \"Alternate Velocity Feedback Loss\",\n 95: \"Auxiliary Velocity Feedback Loss\",\n 96: \"Position Feedback Loss\",\n 97: \"Auto Tach Switch\",\n 100: \"Parameter Checksum\",\n 101: \"Power Down NVS Blank\",\n 102: \"NVS Not Blank\",\n 103: \"Power Down NVS Incompatible\",\n 104: \"Power Board Checksum\",\n 106: \"Incompat MCB-PB\",\n 107: \"Replaced MCB-PB\",\n 108: \"Analog Calibration Checksum\",\n 110: \"Invalid Power Board Data\",\n 111: \"Power Board Invalid ID\",\n 112: \"Power Board App Min Version\",\n 113: \"Tracking DataError\",\n 115: \"Power Down Table Full\",\n 116: \"Power Down Entry Too Large\",\n 117: \"Power Down Data Checksum\",\n 118: \"Power Board Power Down Checksum\",\n 124: \"App ID Changed\",\n 125: \"Using Backup App\",\n 134: \"Start on Power Up\",\n 137: \"External Precharge Error\",\n 138: \"Precharge Open\",\n 141: \"Autotune Enc Angle\",\n 142: \"Autotune Speed Restricted\",\n 143: \"Autotune Current Regulator\",\n 144: \"Autotune Inertia\",\n 145: \"Autotune 
Travel\",\n 13035: \"Net IO Timeout\",\n 13037: \"Net IO Timeout\"\n\n }\n\n plc_tags = {\n \"wellstatus\": well_status_codes.get(value, \"Invalid Code\"),\n \"pidcontrolmode\": pid_control_codes.get(value, \"Invalid Code\"),\n \"downholesensorstatus\": downhole_codes.get(value, \"Invalid Code\"),\n \"alarmflowrate\": alarm_codes.get(value, \"Invalid Code\"),\n \"alarmintakepressure\": alarm_codes.get(value, \"Invalid Code\"),\n \"alarmintaketemperature\": alarm_codes.get(value, \"Invalid Code\"),\n \"alarmtubingpressure\": alarm_codes.get(value, \"Invalid Code\"),\n \"alarmvfd\": alarm_codes.get(value, \"Invalid Code\"),\n \"alarmlockout\": alarm_vfd_codes.get(value, \"Invalid Code\"),\n \"alarmfluidlevel\": alarm_codes.get(value, \"Invalid Code\"),\n \"runpermissive\": permissive_codes.get(value, \"Invalid Code\"),\n \"startpermissive\": permissive_codes.get(value, \"Invalid Code\"),\n \"last_vfd_fault_code\": vfd_fault_codes.get(value, \"Invalid Code\"),\n \"vfd_fault\": vfd_fault_codes.get(value, \"Invalid Code\")\n }\n\n return plc_tags.get(plc_tag, \"Invalid Tag\")\n", + "msgType": 0, + "trigger": "command_event", + "cloudName": "default" + }, + { + "name": "receiveCommand", + "topic": "v1/devices/me/rpc/request/+", + "qos": 1, + "funcName": "receiveCommand", + "payload_type": "Plaintext", + "script": "import json, time\nfrom datetime import datetime as dt\nfrom quickfaas.measure import recall, write\nfrom quickfaas.remotebus import publish\nfrom common.Logger import logger\n\n# Helper function to split the payload into chunks\ndef chunk_payload(payload, chunk_size=20):\n chunked_values = list(payload[\"values\"].items())\n for i in range(0, len(chunked_values), chunk_size):\n yield {\n \"ts\": payload[\"ts\"],\n \"values\": dict(chunked_values[i:i+chunk_size])\n }\n\ndef sync():\n #get new values and send\n payload = {\"ts\": round(dt.timestamp(dt.now()))*1000, \"values\": {}}\n topic = \"v1/devices/me/telemetry\"\n try:\n data = recall()#json.loads(recall().decode(\"utf-8\"))\n except Exception as e:\n logger.error(e)\n logger.debug(data)\n for controller in data:\n for measure in controller[\"measures\"]:\n #publish measure\n if measure[\"name\"] in [\"wellstatus\",\"pidcontrolmode\",\"downholesensorstatus\",\"alarmflowrate\",\"alarmintakepressure\",\"alarmintaketemperature\",\"alarmtubingpressure\",\"alarmvfd\",\"alarmlockout\",\"alarmfluidlevel\",\"runpermissive\",\"startpermissive\",\"last_vfd_fault_code\",\"vfd_fault\", \"flowmeter_fault\"]:\n payload[\"values\"][measure[\"name\"]] = convert_int(measure[\"name\"], measure[\"value\"])\n payload[\"values\"][measure[\"name\"]+ \"_int\"] = measure[\"value\"]\n else:\n payload[\"values\"][measure[\"name\"]] = measure[\"value\"]\n logger.debug(\"Sending on topic: {}\".format(topic))\n logger.debug(\"Sending value: {}\".format(payload))\n for chunk in chunk_payload(payload=payload):\n publish(topic, json.dumps(chunk), 1, cloud_name=\"ThingsBoard\")\n time.sleep(2)\ndef writeplctag(value):\n #value in the form {\"measurement\": , \"value\": }\n try:\n #value = json.loads(value.replace(\"'\",'\"'))\n logger.debug(value)\n #payload format: [{\"name\": \"advvfdipp\", \"measures\": [{\"name\": \"manualfrequencysetpoint\", \"value\": 49}]}]\n message = [{\"name\": \"advvfdipp\", \"measures\":[{\"name\":value[\"measurement\"], \"value\": value[\"value\"]}]}]\n resp = write(message) \n logger.debug(\"RETURN FROM WRITE: {}\".format(resp))\n return True\n except Exception as e:\n logger.debug(e)\n return False\n \ndef receiveCommand(topic, 
payload):\n try:\n logger.debug(topic)\n logger.debug(json.loads(payload))\n p = json.loads(payload)\n command = p[\"method\"]\n commands = {\n \"sync\": sync,\n \"writeplctag\": writeplctag,\n } \n if command == \"setPLCTag\":\n try:\n result = commands[\"writeplctag\"](p[\"params\"])\n logger.debug(result)\n except Exception as e:\n logger.error(e)\n elif command == \"changeSetpoint\":\n try:\n logger.debug(\"attempting controlpoint write\")\n params_type = {\"measurement\": \"pidcontrolmode\", \"value\": p[\"params\"][\"setpointType\"]}\n if params_type[\"value\"]:\n commands[\"writeplctag\"](params_type)\n time.sleep(2)\n except Exception as e:\n logger.error(\"DID NOT WRITE CONTROL MODE\")\n logger.error(e)\n try:\n logger.debug(\"attempting setpoint write\")\n modes = {0: \"flowsetpoint\", 1: \"fluidlevelsetpoint\", 2: \"tubingpressuresetpoint\", 3: \"manualfrequencysetpoint\"}\n params_value = {\"value\": p[\"params\"][\"setpointValue\"]}\n if params_value[\"value\"]:\n params_value[\"measurement\"] = modes[getMode()]\n result = commands[\"writeplctag\"](params_value)\n logger.debug(result)\n except Exception as e:\n logger.error(\"DID NOT WRITE SETPOINT\")\n logger.error(e)\n #logger.debug(command)\n ack(topic.split(\"/\")[-1])\n time.sleep(5)\n sync()\n except Exception as e:\n logger.debug(e)\n \n\ndef ack(msgid):\n #logger.debug(msgid)\n #logger.debug(mac)\n #logger.debug(name)\n #logger.debug(value)\n publish(\"v1/devices/me/rpc/response/\" + str(msgid), json.dumps({\"msg\": {\"time\": time.time()}, \"metadata\": \"\", \"msgType\": \"\"}), 1,cloud_name=\"ThingsBoard\")\n\ndef getMode():\n try:\n data = recall()\n for controller in data:\n for measure in controller[\"measures\"]:\n if measure[\"name\"] == \"pidcontrolmode\":\n return measure[\"value\"]\n except:\n return None\n\ndef convert_int(plc_tag, value):\n well_status_codes = {\n 0: \"Running\",\n 1: \"Pumped Off\",\n 2: \"Alarmed\",\n 3: \"Locked Out\",\n 4: \"Stopped\"\n }\n\n pid_control_codes = {\n 0: \"Flow\",\n 1: \"Fluid Level\",\n 2: \"Tubing Pressure\",\n 3: \"Manual\"\n }\n\n downhole_codes = {\n 0: \"OK\",\n 1: \"Connecting\",\n 2: \"Open Circuit\",\n 3: \"Shorted\",\n 4: \"Cannot Decode\"\n }\n\n permissive_codes = {\n 0: \"OK\",\n 1: \"Flow\",\n 2: \"Intake Pressure\",\n 3: \"Intake Temperature\",\n 4: \"Tubing Pressure\",\n 5: \"VFD\",\n 6: \"Fluid Level\",\n 7: \"Min. 
Downtime\"\n }\n\n alarm_codes = {\n 0: \"OK\",\n 1: \"Alarm\"\n }\n\n alarm_vfd_codes = {\n 0: \"OK\",\n 1: \"Locked Out\"\n }\n\n vfd_fault_codes = {\n 0: \"No Fault\",\n 2: \"Auxiliary Input\",\n 3: \"Power Loss\",\n 4: \"UnderVoltage\",\n 5: \"OverVoltage\",\n 7: \"Motor Overload\",\n 8: \"Heatsink OverTemp\",\n 9: \"Thermister OverTemp\",\n 10: \"Dynamic Brake OverTemp\",\n 12: \"Hardware OverCurrent\",\n 13: \"Ground Fault\",\n 14: \"Ground Warning\",\n 15: \"Load Loss\",\n 17: \"Input Phase Loss\",\n 18: \"Motor PTC Trip\",\n 19: \"Task Overrun\",\n 20: \"Torque Prove Speed Band\",\n 21: \"Output Phase Loss\",\n 24: \"Decel Inhibit\",\n 25: \"OverSpeed Limit\",\n 26: \"Brake Slipped\",\n 27: \"Torque Prove Conflict\",\n 28: \"TP Encls Confict\",\n 29: \"Analog In Loss\",\n 33: \"Auto Restarts Exhausted\",\n 35: \"IPM OverCurrent\",\n 36: \"SW OverCurrent\",\n 38: \"Phase U to Ground\",\n 39: \"Phase V to Ground\",\n 40: \"Phase W to Ground\",\n 41: \"Phase UV Short\",\n 42: \"Phase VW Short\",\n 43: \"Phase WU Short\",\n 44: \"Phase UNeg to Ground\",\n 45: \"Phase VNeg to Ground\",\n 46: \"Phase WNeg to Ground\",\n 48: \"System Defaulted\",\n 49: \"Drive Powerup\",\n 51: \"Clear Fault Queue\",\n 55: \"Control Board Overtemp\",\n 59: \"Invalid Code\",\n 61: \"Shear Pin 1\",\n 62: \"Shear Pin 2\",\n 64: \"Drive Overload\",\n 66: \"OW Torque Level\",\n 67: \"Pump Off\",\n 71: \"Port 1 Adapter\",\n 72: \"Port 2 Adapter\",\n 73: \"Port 3 Adapter\",\n 74: \"Port 4 Adapter\",\n 75: \"Port 5 Adapter\",\n 76: \"Port 6 Adapter\",\n 77: \"IR Volts Range\",\n 78: \"FluxAmps Ref Range\",\n 79: \"Excessive Load\",\n 80: \"AutoTune Aborted\",\n 81: \"Port 1 DPI Loss\",\n 82: \"Port 2 DPI Loss\",\n 83: \"Port 3 DPI Loss\",\n 84: \"Port 4 DPI Loss\",\n 85: \"Port 5 DPI Loss\",\n 86: \"Port 6 DPI Loss\",\n 87: \"IXo Voltage Range\",\n 91: \"Primary Velocity Feedback Loss\",\n 93: \"Hardware Enable Check\",\n 94: \"Alternate Velocity Feedback Loss\",\n 95: \"Auxiliary Velocity Feedback Loss\",\n 96: \"Position Feedback Loss\",\n 97: \"Auto Tach Switch\",\n 100: \"Parameter Checksum\",\n 101: \"Power Down NVS Blank\",\n 102: \"NVS Not Blank\",\n 103: \"Power Down NVS Incompatible\",\n 104: \"Power Board Checksum\",\n 106: \"Incompat MCB-PB\",\n 107: \"Replaced MCB-PB\",\n 108: \"Analog Calibration Checksum\",\n 110: \"Invalid Power Board Data\",\n 111: \"Power Board Invalid ID\",\n 112: \"Power Board App Min Version\",\n 113: \"Tracking DataError\",\n 115: \"Power Down Table Full\",\n 116: \"Power Down Entry Too Large\",\n 117: \"Power Down Data Checksum\",\n 118: \"Power Board Power Down Checksum\",\n 124: \"App ID Changed\",\n 125: \"Using Backup App\",\n 134: \"Start on Power Up\",\n 137: \"External Precharge Error\",\n 138: \"Precharge Open\",\n 141: \"Autotune Enc Angle\",\n 142: \"Autotune Speed Restricted\",\n 143: \"Autotune Current Regulator\",\n 144: \"Autotune Inertia\",\n 145: \"Autotune Travel\",\n 13035: \"Net IO Timeout\",\n 13037: \"Net IO Timeout\"\n\n }\n\n plc_tags = {\n \"wellstatus\": well_status_codes.get(value, \"Invalid Code\"),\n \"pidcontrolmode\": pid_control_codes.get(value, \"Invalid Code\"),\n \"downholesensorstatus\": downhole_codes.get(value, \"Invalid Code\"),\n \"alarmflowrate\": alarm_codes.get(value, \"Invalid Code\"),\n \"alarmintakepressure\": alarm_codes.get(value, \"Invalid Code\"),\n \"alarmintaketemperature\": alarm_codes.get(value, \"Invalid Code\"),\n \"alarmtubingpressure\": alarm_codes.get(value, \"Invalid Code\"),\n \"alarmvfd\": alarm_codes.get(value, 
\"Invalid Code\"),\n \"alarmlockout\": alarm_vfd_codes.get(value, \"Invalid Code\"),\n \"alarmfluidlevel\": alarm_codes.get(value, \"Invalid Code\"),\n \"runpermissive\": permissive_codes.get(value, \"Invalid Code\"),\n \"startpermissive\": permissive_codes.get(value, \"Invalid Code\"),\n \"last_vfd_fault_code\": vfd_fault_codes.get(value, \"Invalid Code\"),\n \"vfd_fault\": vfd_fault_codes.get(value, \"Invalid Code\"),\n \"flowmeter_fault\": alarm_codes.get(value, \"Invalid Code\")\n }\n\n return plc_tags.get(plc_tag, \"Invalid Tag\")\n", + "msgType": 0, + "cloudName": "ThingsBoard", + "trigger": "command_event" + } + ] + }, + "labels": [ + { + "key": "SN", + "value": "GF5022403048023", + "_id": "000066cf56dda1a2" + }, + { + "key": "MAC", + "value": "ec:6e:79:02:4f:c6", + "_id": "000166cf56dd0ec0" + }, + { + "_id": "000266cf56f026bd", + "key": "MAC_UPPER", + "value": "C4:93:00:0D:E0:EF" + }, + { + "_id": "000366cf56fe955d", + "key": "MAC_LOWER", + "value": "c4:93:00:0d:e0:ef" + } + ], + "modbusSlave": { + "enable": 0, + "protocol": "Modbus-TCP", + "port": 502, + "slaveAddr": 1, + "int16Ord": "ab", + "int32Ord": "abcd", + "float32Ord": "abcd", + "maxConnection": 5, + "useRawvalue": 1, + "mapping_table": [] + }, + "modbusRTUSlave": { + "enable": 0, + "protocol": "Modbus-RTU", + "coms": "rs485", + "slaveAddr": 1, + "int16Ord": "ab", + "int32Ord": "abcd", + "float32Ord": "abcd", + "mapping_table": [] + }, + "iec104Server": { + "enable": 0, + "cotSize": 2, + "port": 2404, + "serverList": [ + { + "asduAddr": 1 + } + ], + "kValue": 12, + "wValue": 8, + "t0": 15, + "t1": 15, + "t2": 10, + "t3": 20, + "maximumLink": 5, + "timeSet": 1, + "byteOrder": "abcd", + "useRawvalue": 1, + "asduLen": 2, + "mapping_table": [] + }, + "iec101Server": { + "enable": 0, + "coms": "rs485", + "mode": "UnBalance", + "protocolMode": 0, + "linkLen": 2, + "linkAddr": 1, + "asduLen": 2, + "ioaLen": 3, + "cotLen": 2, + "serverList": [ + { + "asduAddr": 1 + } + ], + "linkTimeOut": 2000, + "timeSet": 1, + "idleTimeOut": 10000, + "byteOrder": "abcd", + "useRawvalue": 1, + "mappingTable": { + "YX": [], + "YC": [], + "YK": [] + } + }, + "iec104Client": { + "enable": 0, + "connectType": 2, + "serverAddr": "ipower.inhandcloud.cn", + "serverPort": 2406, + "communicationCode": "", + "protocol": 1, + "asduAddr": 1, + "tls": 1, + "verification": 1, + "mapping_table": { + "YX": [], + "YC": [], + "YK": [] + } + }, + "opcuaServer": { + "enable": 0, + "port": 4840, + "maximumLink": 5, + "securityMode": 0, + "identifierType": "String", + "certificate": "None", + "privateKey": "None", + "pubsub": 0, + "useRawvalue": 1, + "mapping_table": [] + }, + "sl651Slave": { + "enable": 0, + "centerAaddr": 1, + "remoteAddr": "", + "addrCode": "", + "password": "", + "platform_list": [], + "useRawvalue": 1, + "mapping_table": [] + }, + "hj212Client": { + "enable": 0, + "useRawvalue": 1, + "platform_list": [], + "block_list": [], + "mapping_table": [] + }, + "bacnetServer": { + "enable": 0, + "protocol": "BACnet/IP", + "deviceId": 0, + "port": 47808, + "bbmdEnable": 0, + "useRawvalue": 1, + "mapping_table": [] + }, + "Dnp3Server": { + "enable": 0, + "protocol": "Dnp3-TCP", + "slaveAddr": 1, + "masterAddr": 2, + "port": 20000, + "useRawvalue": 1, + "enableUnsol": 0, + "maxFrasize": 4096, + "layerTimeout": 1000, + "linkRetry": 5, + "enableLink": 0, + "mapping_table": [] + }, + "southMetadata": {}, + "bindMetadata": { + "version": "", + "timestamp": "" + }, + "bindConfig": { + "enable": 0, + "bind": { + "modelId": "", + "modelName": "", + "srcId": "", + 
"srcName": "", + "devId": "", + "devName": "" + }, + "varGroups": [], + "variables": [], + "alerts": [] + }, + "templates": {}, + "version": "3.0.1" +} \ No newline at end of file diff --git a/Pub_Sub/rigpump/.DS_Store b/Pub_Sub/rigpump/.DS_Store index 43a933af9ccc78b7084bdb853a79a49b4cc2239f..07f8b33f00475fb8019e8840bbe360b578024f98 100644 GIT binary patch delta 18 ZcmZn(XbITBDa2?n`J=ehW)-0aq5wR61~>o! delta 18 ZcmZn(XbITBDa2?s`J=ehW)-0aq5wSL20{P; diff --git a/Pub_Sub/rigpump/thingsboard/pub/sendData.py b/Pub_Sub/rigpump/thingsboard/pub/sendData.py index 8f83ed3..2b3942a 100644 --- a/Pub_Sub/rigpump/thingsboard/pub/sendData.py +++ b/Pub_Sub/rigpump/thingsboard/pub/sendData.py @@ -47,8 +47,6 @@ def chunk_payload(payload, chunk_size=20): def sendData(message): #logger.debug(message) - - payload = {"ts": (round(dt.timestamp(dt.now())/600)*600)*1000, "values": {}} attributes_payload = {} for measure in message["measures"]: diff --git a/Pub_Sub/transferlite/thingsboard/pub/sendData.py b/Pub_Sub/transferlite/thingsboard/pub/sendData.py new file mode 100644 index 0000000..a51cc07 --- /dev/null +++ b/Pub_Sub/transferlite/thingsboard/pub/sendData.py @@ -0,0 +1,89 @@ +# Enter your python code. +import json, time +from datetime import datetime as dt +from common.Logger import logger +from quickfaas.remotebus import publish + +report_period = 600 + +def chunk_payload(payload, chunk_size=20): + if "values" in payload: + # Original format: {"ts": ..., "values": {...}} + chunked_values = list(payload["values"].items()) + for i in range(0, len(chunked_values), chunk_size): + yield { + "ts": payload["ts"], + "values": dict(chunked_values[i:i+chunk_size]) + } + else: + # New format: {"key1": "value1", "key2": "value2"} + chunked_keys = list(payload.keys()) + for i in range(0, len(chunked_keys), chunk_size): + yield {k: payload[k] for k in chunked_keys[i:i+chunk_size]} + +def sendData(message): + #logger.debug(message) + payload = {"ts": (round(dt.timestamp(dt.now())/report_period)*report_period)*1000, "values": {}} + attributes_payload = {} + for measure in message["measures"]: + try: + logger.debug(measure) + if measure["health"] == 1: + if measure["name"] in ["auto_manual", "system1_hasleveltransmitter", "system2_hasleveltransmitter", "state_supervisor", "state_system1", "state_system2"]: + logger.debug("Converting DINT/BOOL to STRING") + payload["values"][measure["name"]] = convert_int(measure["name"], measure["value"]) + payload["values"][measure["name"] + "_int"] = measure["value"] + else: + payload["values"][measure["name"]] = measure["value"] + except Exception as e: + logger.error(e) + + for chunk in chunk_payload(payload=payload): + publish(__topic__, json.dumps(chunk), __qos__, cloud_name="ThingsBoard") + time.sleep(2) + + attributes_payload["latestReportTime"] = (round(dt.timestamp(dt.now())/report_period)*report_period)*1000 + for chunk in chunk_payload(payload=attributes_payload): + publish("v1/devices/me/attributes", json.dumps(chunk), __qos__, cloud_name="ThingsBoard") + time.sleep(2) + +def convert_int(plc_tag, value): + + + TRUE_FALSE = { + 0: "false", + 1: "true" + } + + AUTO_MANUAL = { + 0: "Auto", + 1: "Manual" + } + + PHASE_STATES = { + 1: "Running", + 2: "Holding", + 4: "Restarting", + 8: "Stopping", + 16: "Aborting", + 32: "Resetting", + 64: "Idle", + 128: "Held", + 256: "Complete", + 512: "Stopped", + 1024: "Aborted" + } + + + plc_tags = { + "auto_manual": AUTO_MANUAL.get(value, "Invalid Code"), + "system1_hasleveltransmitter": TRUE_FALSE.get(value, "Invalid Code"), + 
"system2_hasleveltransmitter": TRUE_FALSE.get(value, "Invalid Code"), + "state_supervisor": PHASE_STATES.get(value, "Invalid Code"), + "state_system1": PHASE_STATES.get(value, "Invalid Code"), + "state_system2": PHASE_STATES.get(value, "Invalid Code") + } + + return plc_tags.get(plc_tag, "Invalid Tag") + + \ No newline at end of file diff --git a/Pub_Sub/transferlite/thingsboard/sub/receiveCommand.py b/Pub_Sub/transferlite/thingsboard/sub/receiveCommand.py new file mode 100644 index 0000000..6426646 --- /dev/null +++ b/Pub_Sub/transferlite/thingsboard/sub/receiveCommand.py @@ -0,0 +1,135 @@ +import json, time, os +from datetime import datetime as dt +from quickfaas.measure import recall, write +from quickfaas.remotebus import publish +from common.Logger import logger + +# Helper function to split the payload into chunks +def chunk_payload(payload, chunk_size=20): + chunked_values = list(payload["values"].items()) + for i in range(0, len(chunked_values), chunk_size): + yield { + "ts": payload["ts"], + "values": dict(chunked_values[i:i+chunk_size]) + } + +def sync(): + #get new values and send + payload = {"ts": round(dt.timestamp(dt.now()))*1000, "values": {}} + topic = "v1/devices/me/telemetry" + try: + data = recall()#json.loads(recall().decode("utf-8")) + except Exception as e: + logger.error(e) + logger.debug(data) + for controller in data: + for measure in controller["measures"]: + #publish measure + if measure["name"] in ["auto_manual", "system1_hasleveltransmitter", "system2_hasleveltransmitter", "state_supervisor", "state_system1", "state_system2"]: + payload["values"][measure["name"]] = convert_int(measure["name"], measure["value"]) + payload["values"][measure["name"]+ "_int"] = measure["value"] + else: + payload["values"][measure["name"]] = measure["value"] + logger.debug("Sending on topic: {}".format(topic)) + logger.debug("Sending value: {}".format(payload)) + for chunk in chunk_payload(payload=payload): + publish(topic, json.dumps(chunk), 1, cloud_name="ThingsBoard") + time.sleep(2) + +def writeplctag(value): + #value in the form {"measurement": , "value": } + try: + #value = json.loads(value.replace("'",'"')) + logger.debug(value) + #payload format: [{"name": "advvfdipp", "measures": [{"name": "manualfrequencysetpoint", "value": 49}]}] + message = [{"name": "transferlite", "measures":[{"name":value["measurement"], "value": value["value"]}]}] + resp = write(message) + logger.debug("RETURN FROM WRITE: {}".format(resp)) + return True + except Exception as e: + logger.debug(e) + return False + +def reboot(): + #basic = Basic() + logger.info("!" 
* 10 + "REBOOTING DEVICE FROM CLOUD" + "!"*10) + r = os.popen("kill -s SIGHUP `cat /var/run/python/supervisord.pid`").read() + logger.info(f"REBOOT : {r}") + +def receiveCommand(topic, payload): + try: + logger.debug(topic) + logger.debug(json.loads(payload)) + p = json.loads(payload) + command = p["method"] + commands = { + "sync": sync, + "writeplctag": writeplctag, + "reboot": reboot + } + if command == "setPLCTag": + try: + result = commands["writeplctag"](p["params"]) + logger.debug(result) + except Exception as e: + logger.error(e) + elif command == "reboot": + try: + ack(topic.split("/")[-1]) + commands["reboot"]() + except Exception as e: + logger.error(e) + #logger.debug(command) + ack(topic.split("/")[-1]) + time.sleep(2) + sync() + except Exception as e: + logger.debug(e) + +def ack(msgid): + #logger.debug(msgid) + #logger.debug(mac) + #logger.debug(name) + #logger.debug(value) + publish("v1/devices/me/rpc/response/" + str(msgid), json.dumps({"msg": {"time": time.time()}, "metadata": "", "msgType": ""}), 1) + +def convert_int(plc_tag, value): + + + TRUE_FALSE = { + 0: "false", + 1: "true" + } + + AUTO_MANUAL = { + 0: "Auto", + 1: "Manual" + } + + PHASE_STATES = { + 1: "Running", + 2: "Holding", + 4: "Restarting", + 8: "Stopping", + 16: "Aborting", + 32: "Resetting", + 64: "Idle", + 128: "Held", + 256: "Complete", + 512: "Stopped", + 1024: "Aborted" + } + + + plc_tags = { + "auto_manual": AUTO_MANUAL.get(value, "Invalid Code"), + "system1_hasleveltransmitter": TRUE_FALSE.get(value, "Invalid Code"), + "system2_hasleveltransmitter": TRUE_FALSE.get(value, "Invalid Code"), + "state_supervisor": PHASE_STATES.get(value, "Invalid Code"), + "state_system1": PHASE_STATES.get(value, "Invalid Code"), + "state_system2": PHASE_STATES.get(value, "Invalid Code") + } + + return plc_tags.get(plc_tag, "Invalid Tag") + + \ No newline at end of file diff --git a/device.csv b/device.csv new file mode 100644 index 0000000..64e7152 --- /dev/null +++ b/device.csv @@ -0,0 +1,6 @@ +name,type,mqttClientId,mqttUsername,mqttPassword +Flow Meter 18,flowmeterskid,cp-flow-meter-18,conocophillipsmqtt,conocophillipsmqtt@1903 +Flow Meter 19,flowmeterskid,cp-flow-meter-19,conocophillipsmqtt,conocophillipsmqtt@1903 +Flow Meter 20,flowmeterskid,cp-flow-meter-20,conocophillipsmqtt,conocophillipsmqtt@1903 +Flow Meter 21,flowmeterskid,cp-flow-meter-21,conocophillipsmqtt,conocophillipsmqtt@1903 +Flow Meter 22,flowmeterskid,cp-flow-meter-22,conocophillipsmqtt,conocophillipsmqtt@1903 \ No newline at end of file diff --git a/devices.csv b/devices.csv new file mode 100644 index 0000000..26d069a --- /dev/null +++ b/devices.csv @@ -0,0 +1,6 @@ +name,type,mqtt client id,mqtt user name,mqtt password +Flow Meter 18,flowmeterskid,cp-flow-meter-18,conocophillipsmqtt,conocophillipsmqtt@1903 +Flow Meter 19,flowmeterskid,cp-flow-meter-19,conocophillipsmqtt,conocophillipsmqtt@1903 +Flow Meter 20,flowmeterskid,cp-flow-meter-20,conocophillipsmqtt,conocophillipsmqtt@1903 +Flow Meter 21,flowmeterskid,cp-flow-meter-21,conocophillipsmqtt,conocophillipsmqtt@1903 +Flow Meter 22,flowmeterskid,cp-flow-meter-22,conocophillipsmqtt,conocophillipsmqtt@1903 \ No newline at end of file