# ThingsBoard telemetry bridge for quickfaas: tracks pump runtime statistics,
# keeps cloud credentials/parameters in sync with persisted copies, and
# publishes chunked telemetry payloads.
import json, os, time, math, uuid
|
|
from datetime import datetime as dt
|
|
from common.Logger import logger
|
|
from quickfaas.remotebus import publish
|
|
from quickfaas.global_dict import get as get_params
|
|
from quickfaas.global_dict import _set_global_args
|
|
|
|
class RuntimeStats:
    """Rolling 30-day pump runtime and VFD-frequency statistics.

    Data layout (persisted to JSON by saveDataToFile)::

        self.runs = {
            "YYYY-MM-DD": {
                "run_1": {"start": <epoch>, "end": <epoch>, "frequencies": [Hz, ...]},
                ...
            },
        }

    A record with ``start`` set and ``end == 0`` is a run still in progress.
    """

    def __init__(self):
        self.runs = {}          # per-day run records, see class docstring
        self.currentRun = 0     # index of today's active "run_N" record
        self.today = ""         # datetime for the tracked day (set by addDay/manageTime)
        self.todayString = ""   # "YYYY-MM-DD" form of self.today

    def manageTime(self):
        """Roll over to a new day when the date changed and prune old days.

        A run still open at midnight is closed at 23:59:59 of its day so
        per-day totals never span days. History older than 30 days is dropped.
        """
        if self.todayString != dt.strftime(dt.today(), "%Y-%m-%d"):
            # Guard: a freshly-constructed instance has no day bucket yet.
            if self.todayString in self.runs:
                current = self.runs[self.todayString]["run_" + str(self.currentRun)]
                if current["start"] and not current["end"]:
                    current["end"] = time.mktime(dt.strptime(self.todayString + " 23:59:59", "%Y-%m-%d %H:%M:%S").timetuple())
            self.addDay()
        self.today = dt.today()
        self.todayString = dt.strftime(self.today, "%Y-%m-%d")
        # Keep at most a 30-day rolling window of history.
        days = sorted(self.runs.keys())
        while days and (dt.strptime(days[-1], "%Y-%m-%d") - dt.strptime(days[0], "%Y-%m-%d")).days > 30:
            self.removeDay(day=days[0])
            days = sorted(self.runs.keys())

    def addHertzDataPoint(self, frequency):
        """Record a VFD frequency sample (only values > 0) against today's current run."""
        if frequency > 0:
            self.manageTime()
            run = self.runs[self.todayString]["run_" + str(self.currentRun)]
            # Older persisted records may lack the "frequencies" key.
            run.setdefault("frequencies", []).append(frequency)

    def startRun(self):
        """Mark the start of a run; closes any run already in progress first."""
        if self.checkRunning():
            self.endRun()
        self.runs[self.todayString]["run_" + str(self.currentRun)]["start"] = time.time()

    def endRun(self):
        """Close the current run and open a fresh empty record for the next one."""
        self.runs[self.todayString]["run_" + str(self.currentRun)]["end"] = time.time()
        self.currentRun += 1
        self.runs[self.todayString]["run_" + str(self.currentRun)] = {"start": 0, "end": 0, "frequencies": []}

    def checkRunning(self):
        """Return True when today's current run has started but not ended."""
        current = self.runs[self.todayString]["run_" + str(self.currentRun)]
        if current["start"] and not current["end"]:
            return True
        return False

    def addDay(self):
        """Start a fresh day bucket for today with a single empty run record."""
        self.today = dt.today()
        self.todayString = dt.strftime(self.today, "%Y-%m-%d")
        self.currentRun = 1
        self.runs[self.todayString] = {}
        self.runs[self.todayString]["run_" + str(self.currentRun)] = {"start": 0, "end": 0, "frequencies": []}

    def countRunsDay(self, day=None):
        """Number of run records for *day* (default: today)."""
        if not day:
            day = self.todayString
        return len(self.runs[day].keys())

    def countRunsMultiDay(self, numDays=30):
        """Count runs recorded within the last *numDays* days (today inclusive).

        Fix: the numDays argument was previously accepted but ignored, so the
        method always counted every stored day.
        """
        if not self.runs:
            return 0
        reference = dt.strptime(self.todayString, "%Y-%m-%d") if self.todayString else dt.today()
        total_runs = 0
        for day in list(self.runs.keys()):
            if (reference - dt.strptime(day, "%Y-%m-%d")).days <= numDays:
                total_runs += self.countRunsDay(day=day)
        return total_runs

    def calculateAverageHertzDay(self, day=None, returnArray=False):
        """Average frequency for *day* (default: today), rounded to 2 places.

        With returnArray=True the raw sample list is returned instead.
        Fix: returns 0 for a day with no samples instead of dividing by zero.
        """
        dayFrequencies = []
        if not day:
            day = self.todayString
        for run in list(self.runs[day].keys()):
            try:
                dayFrequencies += self.runs[day][run]["frequencies"]
            except Exception as e:
                print("{} missing frequency data for {}".format(day, run))
        if returnArray:
            return dayFrequencies
        if not dayFrequencies:
            return 0
        return round(math.fsum(dayFrequencies) / len(dayFrequencies), 2)

    def calculateAverageHertzMultiDay(self, numDays=30):
        """Average frequency across the last *numDays* full days (today excluded)."""
        self.manageTime()
        frequencies = []
        for day in list(self.runs.keys()):
            if not day == self.todayString and (dt.strptime(self.todayString, "%Y-%m-%d") - dt.strptime(day, "%Y-%m-%d")).days <= numDays:
                try:
                    frequencies += self.calculateAverageHertzDay(day=day, returnArray=True)
                except Exception as e:
                    print("{} missing frequency data".format(day))
        if len(frequencies):
            return round(math.fsum(frequencies) / len(frequencies), 2)
        return 0

    def calculateRunTimeDay(self, day=None, convertToHours=True):
        """Total recorded run time for *day* (default: today).

        Fix: an in-progress run (start set, end == 0) is counted up to "now";
        previously it contributed a large negative value (0 - start).
        """
        total_time = 0
        if not day:
            day = self.todayString
        for run in list(self.runs[day].keys()):
            record = self.runs[day][run]
            end = record["end"]
            if record["start"] and not end:
                end = time.time()
            total_time += end - record["start"]
        if convertToHours:
            return self.convertSecondstoHours(total_time)
        return total_time

    def calculateRunTimeMultiDay(self, numDays=30, convertToHours=True):
        """Total run time across the last *numDays* full days (today excluded)."""
        total_time = 0
        for day in list(self.runs.keys()):
            if not day == self.todayString and (dt.strptime(self.todayString, "%Y-%m-%d") - dt.strptime(day, "%Y-%m-%d")).days <= numDays:
                total_time += self.calculateRunTimeDay(day=day, convertToHours=False)
        if convertToHours:
            return self.convertSecondstoHours(total_time)
        return total_time

    def calculateRunPercentDay(self, day=None, precise=False):
        """Percent of a 24-hour day spent running (run time is taken in hours)."""
        if not day:
            day = self.todayString
        if precise:
            return (self.calculateRunTimeDay(day=day) / 24) * 100
        return round((self.calculateRunTimeDay(day=day) / 24) * 100, 2)

    def calculateRunPercentMultiDay(self, numDays=30, precise=False):
        """Percent of the last *numDays* days spent running.

        Fix: numDays is now forwarded to calculateRunTimeMultiDay so the
        numerator and denominator always cover the same window (previously
        the numerator was hard-wired to the default 30-day window).
        """
        self.manageTime()
        if precise:
            return (self.calculateRunTimeMultiDay(numDays=numDays) / (24 * numDays)) * 100
        return round((self.calculateRunTimeMultiDay(numDays=numDays) / (24 * numDays)) * 100, 2)

    def removeDay(self, day=None):
        """Delete the bucket for *day*; the caller must name the day explicitly."""
        if not day:
            raise Exception("Day can not be None")
        print("removing day {}".format(day))
        del self.runs[day]

    def convertSecondstoHours(self, seconds):
        """Convert seconds to hours rounded to 2 decimal places."""
        return round(seconds / (60 * 60), 2)

    def loadDataFromFile(self, filePath="/var/user/files/runtimestats.json"):
        """Restore state from *filePath*; on failure, start fresh and create the file.

        Fix: the bare ``except:`` is narrowed to the errors a missing or
        corrupt stats file can actually raise, and the duplicated
        file-writing code now delegates to saveDataToFile.
        """
        try:
            with open(filePath, "r") as f:
                temp = json.load(f)
                self.runs = temp["data"]
                self.currentRun = temp["current_run"]
                self.today = dt.strptime(temp["current_day"], "%Y-%m-%d")
                self.todayString = temp["current_day"]
            self.manageTime()
        except (OSError, ValueError, KeyError):
            print("Could not find file at {}".format(filePath))
            print("creating file")
            self.addDay()
            self.saveDataToFile(filePath=filePath)

    def saveDataToFile(self, filePath="/var/user/files/runtimestats.json"):
        """Persist current state to *filePath* as JSON; errors are printed, not raised."""
        try:
            print("Saving Runs")
            with open(filePath, "w") as f:
                d = {
                    "current_run": self.currentRun,
                    "current_day": self.todayString,
                    "data": self.runs
                }
                json.dump(d, f, indent=4)
        except Exception as e:
            print(e)
|
|
|
|
# Module-level bootstrap: restore the persisted runtime statistics (creating
# the backing file on first run) so `rts` is ready before any poll arrives.
rts_path = "/var/user/files/runtimestats_tb.json"

try:
    rts = RuntimeStats()
    rts.loadDataFromFile(filePath=rts_path)
    rts.saveDataToFile(filePath=rts_path)
except Exception as e:
    logger.error(f"Error in TB start RTS: {e}" )
|
|
|
|
def reboot(reason="Rebooting for config file update"):
    """Restart the application by sending SIGHUP to supervisord.

    The reason is logged first so the cause of the restart can be traced
    in the logs afterwards.
    """
    banner = "!" * 10
    logger.info(banner + "REBOOTING DEVICE" + banner)
    logger.info(reason)
    result = os.popen("kill -s SIGHUP `cat /var/run/python/supervisord.pid`").read()
    logger.info(f"REBOOT : {result}")
|
|
|
|
def checkFileExist(filename):
    """Ensure /var/user/files exists and contains *filename*.

    Creates the folder and/or seeds the file with an empty JSON object when
    missing; an existing file is left untouched. Fix: the original created
    the file twice (once in the folder branch, once in the file branch) —
    collapsed into one folder check and one file check with the same end
    state.
    """
    path = "/var/user/files"
    if not os.path.exists(path):
        logger.debug("no folder making files folder in var/user")
        os.makedirs(path)
    filepath = path + "/" + filename
    if not os.path.exists(filepath):
        logger.debug("no creds file making creds file")
        # "a" (append) preserves the original open mode; on a missing file it
        # simply creates it and writes "{}".
        with open(filepath, "a") as f:
            json.dump({}, f)
|
|
|
|
def convertDStoJSON(ds):
    """Flatten a device-supervisor [{"key": k, "value": v}, ...] list into a dict."""
    return {entry["key"]: entry["value"] for entry in ds}
|
|
|
|
def convertJSONtoDS(j):
    """Expand a plain dict into the device-supervisor [{"key": k, "value": v}, ...] form."""
    return [{"key": key, "value": j[key]} for key in j.keys()]
|
|
|
|
def checkCredentialConfig():
    # Reconcile the MQTT cloud credentials in the device-supervisor config
    # with the copy persisted in /var/user/files/creds.json, so credentials
    # survive a config reset and manual edits get stored.
    # NOTE(review): indentation was reconstructed from a mangled paste —
    # confirm nesting against the deployed source.
    logger.debug("CHECKING CONFIG")
    cfgpath = "/var/user/cfg/device_supervisor/device_supervisor.cfg"
    credspath = "/var/user/files/creds.json"
    cfg = dict()
    with open(cfgpath, "r") as f:
        cfg = json.load(f)
    clouds = cfg.get("clouds")
    logger.debug(clouds)
    #if not configured then try to configure from stored values
    if clouds[0]["args"]["clientId"] == "unknown" or clouds[0]["args"]["username"] == "unknown" or not clouds[0]["args"]["passwd"] or clouds[0]["args"]["passwd"] == "unknown":
        checkFileExist("creds.json")
        with open(credspath, "r") as c:
            creds = json.load(c)
        if creds:
            logger.debug("updating config with stored data")
            # Copy the stored credentials into the live cloud config.
            clouds[0]["args"]["clientId"] = creds["clientId"]
            clouds[0]["args"]["username"] = creds["userName"]
            clouds[0]["args"]["passwd"] = creds["password"]
            cfg["clouds"] = clouds
            cfg = checkParameterConfig(cfg)
            with open(cfgpath, "w", encoding='utf-8') as n:
                json.dump(cfg, n, indent=1, ensure_ascii=False)
            # Restart so device-supervisor picks up the rewritten config.
            reboot()
    else:
        #assuming clouds is filled out, if data is different then assume someone typed in something new and store it, if creds is empty fill with clouds' data
        checkFileExist("creds.json")
        with open(credspath, "r") as c:
            logger.debug("updating stored file with new data")
            cfg = checkParameterConfig(cfg)
            with open(cfgpath, "w", encoding='utf-8') as n:
                json.dump(cfg, n, indent=1, ensure_ascii=False)
            creds = json.load(c)
            if creds:
                # Stored copy exists: sync any field the user changed in the config.
                if creds["clientId"] != clouds[0]["args"]["clientId"]:
                    creds["clientId"] = clouds[0]["args"]["clientId"]
                if creds["userName"] != clouds[0]["args"]["username"]:
                    creds["userName"] = clouds[0]["args"]["username"]
                if creds["password"] != clouds[0]["args"]["passwd"]:
                    creds["password"] = clouds[0]["args"]["passwd"]
            else:
                # Empty store (fresh "{}" file): seed it from the live config.
                creds["clientId"] = clouds[0]["args"]["clientId"]
                creds["userName"] = clouds[0]["args"]["username"]
                creds["password"] = clouds[0]["args"]["passwd"]
            with open(credspath, "w") as cw:
                json.dump(creds,cw)
|
|
|
|
def checkParameterConfig(cfg):
    # Reconcile the "labels" parameter list in the supervisor config with the
    # persisted copy in /var/user/files/params.json, then push the merged set
    # into the quickfaas global args. Returns the (possibly updated) cfg dict.
    # NOTE(review): indentation was reconstructed from a mangled paste —
    # confirm nesting against the deployed source.
    logger.debug("Checking Parameters!!!!")
    paramspath = "/var/user/files/params.json"
    cfgparams = convertDStoJSON(cfg.get("labels"))
    #check stored values
    checkFileExist("params.json")
    with open(paramspath, "r") as f:
        logger.debug("Opened param storage file")
        params = json.load(f)
    if params:
        if cfgparams != params:
            #go through each param
            #if not "unknown" and cfg and params aren't the same take from cfg likely updated manually
            #if key in cfg but not in params copy to params
            logger.debug("equalizing params between cfg and stored")
            for key in cfgparams.keys():
                try:
                    if cfgparams[key] != params[key] and cfgparams[key] != "unknown":
                        params[key] = cfgparams[key]
                except:
                    # Key missing from the stored params: copy it over.
                    params[key] = cfgparams[key]
        # Write the merged set back to the config, the globals, and the store.
        cfg["labels"] = convertJSONtoDS(params)
        _set_global_args(convertJSONtoDS(params))
        with open(paramspath, "w") as p:
            json.dump(params, p)
    else:
        # Empty store: initialize it from the params already held in memory.
        with open(paramspath, "w") as p:
            logger.debug("initializing param file with params in memory")
            json.dump(convertDStoJSON(get_params()), p)
        cfg["labels"] = get_params()

    return cfg
|
|
|
|
def chunk_payload(payload, chunk_size=20):
    """Split a telemetry payload into pieces of at most *chunk_size* values.

    Yields dicts sharing the original "ts" but holding successive slices of
    the "values" mapping. An empty "values" dict yields nothing.
    """
    entries = list(payload["values"].items())
    offset = 0
    while offset < len(entries):
        yield {
            "ts": payload["ts"],
            "values": dict(entries[offset:offset + chunk_size])
        }
        offset += chunk_size
|
|
|
|
def sendData(message):
    # Entry point for a measurement poll: syncs config/credentials, builds a
    # ThingsBoard telemetry payload from message["measures"], folds in the
    # 30-day runtime statistics, then publishes the payload in chunks.
    # NOTE(review): __topic__ and __qos__ are not defined in this file —
    # presumably injected by the quickfaas runtime; confirm.
    #logger.debug(message)
    try:
        checkCredentialConfig()
    except Exception as e:
        logger.error(e)
    # Timestamp rounded to the nearest 10 minutes, in milliseconds.
    payload = {"ts": (round(dt.timestamp(dt.now())/600)*600)*1000, "values": {}}
    for measure in message["measures"]:
        try:
            logger.debug(measure)
            # Clock sanity check: reboot if the poll timestamp drifts more
            # than an hour from the device clock.
            if abs(payload["ts"]/1000 - measure["timestamp"]) > 3600:
                reboot(reason="Poll timestamp and actual timestamp out of sync. Actual: {} Poll: {}".format(payload["ts"]/1000,measure["timestamp"]))
            # Status/alarm tags get decoded to display strings; both the
            # string and the raw integer ("<name>_int") are sent.
            if measure["name"] in ["wellstatus","pidcontrolmode","downholesensorstatus","alarmflowrate","alarmintakepressure","alarmintaketemperature","alarmtubingpressure","alarmvfd","alarmlockout","alarmfluidlevel","runpermissive","startpermissive","last_vfd_fault_code","vfd_fault", "flowmeter_fault"]:
                logger.debug("Converting DINT/BOOL to STRING")
                value = convert_int(measure["name"], measure["value"])
                logger.debug("Converted {} to {}".format(measure["value"], value))
                payload["values"][measure["name"]] = value
                payload["values"][measure["name"] + "_int"] = measure["value"]
            else:
                payload["values"][measure["name"]] = measure["value"]
            if measure["name"] == "wellstatus":
                # Status 0 ("Running" per convert_int): open a run if none is
                # active; any non-zero status closes the active run.
                if measure["value"] == 0 and not rts.runs[rts.todayString]["run_" + str(rts.currentRun)]["start"]:
                    rts.startRun()
                    rts.saveDataToFile(filePath=rts_path)
                elif measure["value"] > 0 and rts.runs[rts.todayString]["run_" + str(rts.currentRun)]["start"] and not rts.runs[rts.todayString]["run_" + str(rts.currentRun)]["end"]:
                    rts.endRun()
                    rts.saveDataToFile(filePath=rts_path)
                logger.info(f"Adding {rts.calculateRunPercentMultiDay()} to payload as percentRunTime30Days")
                payload["values"]["percentRunTime30Days"] = rts.calculateRunPercentMultiDay()

            if measure["name"] == "vfdfrequency":
                # Only positive frequencies are recorded as samples.
                if measure["value"] > 0:
                    rts.addHertzDataPoint(measure["value"])
                    rts.saveDataToFile(filePath=rts_path)
                logger.info(f"Adding {rts.calculateAverageHertzMultiDay()} to payload as avgFrequency30Days")
                payload["values"]["avgFrequency30Days"] = rts.calculateAverageHertzMultiDay()

        except Exception as e:
            logger.error(e)

    # Publish in chunks (20 values per chunk by default) with a pause between
    # them, then report the send time as a device attribute.
    for chunk in chunk_payload(payload=payload):
        publish(__topic__, json.dumps(chunk), __qos__, cloud_name="ThingsBoard")
        time.sleep(2)
    publish("v1/devices/me/attributes", json.dumps({"latestReportTime": (round(dt.timestamp(dt.now())/600)*600)*1000}), __qos__, cloud_name="ThingsBoard")
|
|
|
|
def convert_int(plc_tag, value):
    """Translate an integer PLC status/alarm code into its display string.

    Returns "Invalid Code" for an unknown value of a known tag, and
    "Invalid Tag" when *plc_tag* has no code table.

    Fix: the original built a dict of fifteen already-resolved lookups on
    every call (eagerly evaluating every table's .get); this version maps the
    tag to its code table first and performs a single lookup.
    """
    well_status_codes = {
        0: "Running",
        1: "Pumped Off",
        2: "Alarmed",
        3: "Locked Out",
        4: "Stopped"
    }

    pid_control_codes = {
        0: "Flow",
        1: "Fluid Level",
        2: "Tubing Pressure",
        3: "Manual"
    }

    downhole_codes = {
        0: "OK",
        1: "Connecting",
        2: "Open Circuit",
        3: "Shorted",
        4: "Cannot Decode"
    }

    permissive_codes = {
        0: "OK",
        1: "Flow",
        2: "Intake Pressure",
        3: "Intake Temperature",
        4: "Tubing Pressure",
        5: "VFD",
        6: "Fluid Level",
        7: "Min. Downtime"
    }

    alarm_codes = {
        0: "OK",
        1: "Alarm"
    }

    alarm_vfd_codes = {
        0: "OK",
        1: "Locked Out"
    }

    vfd_fault_codes = {
        0: "No Fault",
        2: "Auxiliary Input",
        3: "Power Loss",
        4: "UnderVoltage",
        5: "OverVoltage",
        7: "Motor Overload",
        8: "Heatsink OverTemp",
        9: "Thermister OverTemp",
        10: "Dynamic Brake OverTemp",
        12: "Hardware OverCurrent",
        13: "Ground Fault",
        14: "Ground Warning",
        15: "Load Loss",
        17: "Input Phase Loss",
        18: "Motor PTC Trip",
        19: "Task Overrun",
        20: "Torque Prove Speed Band",
        21: "Output Phase Loss",
        24: "Decel Inhibit",
        25: "OverSpeed Limit",
        26: "Brake Slipped",
        27: "Torque Prove Conflict",
        28: "TP Encls Confict",
        29: "Analog In Loss",
        33: "Auto Restarts Exhausted",
        35: "IPM OverCurrent",
        36: "SW OverCurrent",
        38: "Phase U to Ground",
        39: "Phase V to Ground",
        40: "Phase W to Ground",
        41: "Phase UV Short",
        42: "Phase VW Short",
        43: "Phase WU Short",
        44: "Phase UNeg to Ground",
        45: "Phase VNeg to Ground",
        46: "Phase WNeg to Ground",
        48: "System Defaulted",
        49: "Drive Powerup",
        51: "Clear Fault Queue",
        55: "Control Board Overtemp",
        59: "Invalid Code",
        61: "Shear Pin 1",
        62: "Shear Pin 2",
        64: "Drive Overload",
        66: "OW Torque Level",
        67: "Pump Off",
        71: "Port 1 Adapter",
        72: "Port 2 Adapter",
        73: "Port 3 Adapter",
        74: "Port 4 Adapter",
        75: "Port 5 Adapter",
        76: "Port 6 Adapter",
        77: "IR Volts Range",
        78: "FluxAmps Ref Range",
        79: "Excessive Load",
        80: "AutoTune Aborted",
        81: "Port 1 DPI Loss",
        82: "Port 2 DPI Loss",
        83: "Port 3 DPI Loss",
        84: "Port 4 DPI Loss",
        85: "Port 5 DPI Loss",
        86: "Port 6 DPI Loss",
        87: "IXo Voltage Range",
        91: "Primary Velocity Feedback Loss",
        93: "Hardware Enable Check",
        94: "Alternate Velocity Feedback Loss",
        95: "Auxiliary Velocity Feedback Loss",
        96: "Position Feedback Loss",
        97: "Auto Tach Switch",
        100: "Parameter Checksum",
        101: "Power Down NVS Blank",
        102: "NVS Not Blank",
        103: "Power Down NVS Incompatible",
        104: "Power Board Checksum",
        106: "Incompat MCB-PB",
        107: "Replaced MCB-PB",
        108: "Analog Calibration Checksum",
        110: "Invalid Power Board Data",
        111: "Power Board Invalid ID",
        112: "Power Board App Min Version",
        113: "Tracking DataError",
        115: "Power Down Table Full",
        116: "Power Down Entry Too Large",
        117: "Power Down Data Checksum",
        118: "Power Board Power Down Checksum",
        124: "App ID Changed",
        125: "Using Backup App",
        134: "Start on Power Up",
        137: "External Precharge Error",
        138: "Precharge Open",
        141: "Autotune Enc Angle",
        142: "Autotune Speed Restricted",
        143: "Autotune Current Regulator",
        144: "Autotune Inertia",
        145: "Autotune Travel",
        13035: "Net IO Timeout",
        13037: "Net IO Timeout"
    }

    # Dispatch: which code table decodes each PLC tag.
    tag_tables = {
        "wellstatus": well_status_codes,
        "pidcontrolmode": pid_control_codes,
        "downholesensorstatus": downhole_codes,
        "alarmflowrate": alarm_codes,
        "alarmintakepressure": alarm_codes,
        "alarmintaketemperature": alarm_codes,
        "alarmtubingpressure": alarm_codes,
        "alarmvfd": alarm_codes,
        "alarmlockout": alarm_vfd_codes,
        "alarmfluidlevel": alarm_codes,
        "runpermissive": permissive_codes,
        "startpermissive": permissive_codes,
        "last_vfd_fault_code": vfd_fault_codes,
        "vfd_fault": vfd_fault_codes,
        "flowmeter_fault": alarm_codes
    }

    table = tag_tables.get(plc_tag)
    if table is None:
        return "Invalid Tag"
    return table.get(value, "Invalid Code")
|
|
|
|
|