Added report generator for thingsboard
This commit is contained in:
BIN
meshifyDrivers/.DS_Store
vendored
BIN
meshifyDrivers/.DS_Store
vendored
Binary file not shown.
301
meshifyDrivers/advvfdipp/Channel.py
Normal file
301
meshifyDrivers/advvfdipp/Channel.py
Normal file
@@ -0,0 +1,301 @@
|
||||
"""Define Meshify channel class."""
|
||||
import time
|
||||
import urllib
|
||||
from pycomm.ab_comm.clx import Driver as ClxDriver
|
||||
from pycomm.cip.cip_base import CommError, DataError
|
||||
from file_logger import filelogger as log
|
||||
|
||||
|
||||
|
||||
TAG_DATAERROR_SLEEPTIME = 5
|
||||
|
||||
def binarray(intval):
    """Return *intval* as a list of bit characters, least significant bit first.

    The result always has at least eight entries (zero-padded), each the
    character '0' or '1'.
    """
    # Format to (at least) eight binary digits, then reverse so index 0
    # corresponds to the least significant bit.
    return list('{0:08b}'.format(intval))[::-1]
|
||||
|
||||
|
||||
def read_tag(addr, tag, plc_type="CLX"):
    """Read a single tag from the PLC at *addr*.

    Returns the pycomm read result on success (a (value, type) tuple per
    the pycomm driver usage elsewhere in this file), or False on any
    failure.  ``plc_type == "Micro800"`` opens a direct connection;
    anything else routes through the backplane.
    """
    direct = plc_type == "Micro800"
    clx = ClxDriver()
    try:
        if clx.open(addr, direct_connection=direct):
            try:
                val = clx.read_tag(tag)
                clx.close()
                return val
            except DataError as err:
                clx.close()
                # Back off briefly before the caller retries: repeated
                # immediate reads after a DataError tend to fail too.
                time.sleep(TAG_DATAERROR_SLEEPTIME)
                log.error("Data Error during readTag({}, {}): {}".format(addr, tag, err))
    except CommError:
        # err = c.get_status()
        clx.close()
        log.error("Could not connect during readTag({}, {})".format(addr, tag))
    except AttributeError as err:
        clx.close()
        log.error("AttributeError during readTag({}, {}): \n{}".format(addr, tag, err))
    # Fall-through on any failure path: make sure the connection is
    # released (a second close is tolerated here) and signal failure.
    clx.close()
    return False
|
||||
|
||||
|
||||
def read_array(addr, tag, start, end, plc_type="CLX"):
    """Read elements ``[start, end)`` of array *tag* from the PLC.

    Returns a list of element values rounded to 4 decimal places on
    success, False when the read produced no values, and (implicitly)
    None when the connection could not be opened or an exception was
    raised mid-read.
    """
    direct = plc_type == "Micro800"
    clx = ClxDriver()
    if clx.open(addr, direct_connection=direct):
        arr_vals = []
        try:
            for i in range(start, end):
                # Index into the PLC array one element at a time,
                # e.g. "MyTag[3]".
                tag_w_index = tag + "[{}]".format(i)
                val = clx.read_tag(tag_w_index)
                # val[0] is the element value (pycomm returns a
                # (value, type) pair).
                arr_vals.append(round(val[0], 4))
            if arr_vals:
                clx.close()
                return arr_vals
            else:
                log.error("No length for {}".format(addr))
                clx.close()
                return False
        except Exception:
            log.error("Error during readArray({}, {}, {}, {})".format(addr, tag, start, end))
            err = clx.get_status()
            clx.close()
            log.error(err)
    # NOTE(review): falls through with no explicit return (None) when the
    # open fails or an exception occurred — callers must treat None as
    # failure.
    clx.close()
|
||||
|
||||
|
||||
def write_tag(addr, tag, val, plc_type="CLX"):
    """Write *val* to *tag* on the PLC at *addr*.

    The tag is read first so the write can reuse the data type reported
    by the PLC.  Returns the pycomm write status on success, False on a
    CommError, and (implicitly) None when the connection opened but a
    DataError occurred.
    """
    direct = plc_type == "Micro800"
    clx = ClxDriver()
    try:
        if clx.open(addr, direct_connection=direct):
            try:
                # Read once to learn the tag's data type (initial_val[1]),
                # which pycomm's write_tag requires.
                initial_val = clx.read_tag(tag)
                write_status = clx.write_tag(tag, val, initial_val[1])
                clx.close()
                return write_status
            except DataError as err:
                clx_err = clx.get_status()
                clx.close()
                log.error("--\nDataError during writeTag({}, {}, {}, plc_type={}) -- {}\n{}\n".format(addr, tag, val, plc_type, err, clx_err))

    except CommError as err:
        # NOTE(review): clx_err is collected but not included in this
        # log message — confirm whether it was meant to be logged.
        clx_err = clx.get_status()
        log.error("--\nCommError during write_tag({}, {}, {}, plc_type={})\n{}\n--".format(addr, tag, val, plc_type, err))
        clx.close()
        return False
|
||||
|
||||
|
||||
class Channel(object):
    """Holds the configuration for a Meshify channel.

    A channel tracks the last value sent upstream and decides, via
    ``check``, whether a freshly read value needs to be sent again
    (value changed, guarantee interval elapsed, or send forced).
    """

    def __init__(self, mesh_name, data_type, chg_threshold, guarantee_sec, map_=False, write_enabled=False):
        """Initialize the channel.

        mesh_name     -- name the value is published under.
        data_type     -- PLC data type string ("BOOL", "STRING", "REAL", ...).
        chg_threshold -- numeric delta that triggers a send (non-BOOL/STRING).
        guarantee_sec -- max seconds between sends even if unchanged.
        map_          -- optional dict translating raw values before sending.
        write_enabled -- whether writes to this channel are allowed.
        """
        self.mesh_name = mesh_name
        self.data_type = data_type
        self.last_value = None       # value sent before the current one
        self.value = None            # most recently sent value
        self.last_send_time = 0      # epoch seconds of last send; 0 = never
        self.chg_threshold = chg_threshold
        self.guarantee_sec = guarantee_sec
        self.map_ = map_
        self.write_enabled = write_enabled

    def __str__(self):
        """Create a string for the channel."""
        return "{}\nvalue: {}, last_send_time: {}".format(self.mesh_name, self.value, self.last_send_time)

    def check(self, new_value, force_send=False):
        """Check to see if the new_value needs to be stored.

        Updates ``value``/``last_value``/``last_send_time`` when a send
        is needed and returns True; otherwise returns False.
        """
        send_needed = False
        send_reason = ""
        if self.data_type == 'BOOL' or self.data_type == 'STRING':
            # Discrete types: any value change matters (threshold ignored).
            if self.last_send_time == 0:
                send_needed = True
                send_reason = "no send time"
            elif self.value is None:
                send_needed = True
                send_reason = "no value"
            elif self.value != new_value:
                if self.map_:
                    # Compare against the mapped form: self.value already
                    # holds a mapped value, so only send if the mapping of
                    # the new raw value actually differs.
                    if not self.value == self.map_[new_value]:
                        send_needed = True
                        send_reason = "value change"
                    elif force_send:
                        send_needed = True
                        send_reason = "forced"
                else:
                    send_needed = True
                    send_reason = "value change"
            elif (time.time() - self.last_send_time) > self.guarantee_sec:
                send_needed = True
                send_reason = "guarantee sec"
            elif force_send:
                send_needed = True
                send_reason = "forced"
        else:
            # Numeric types: send only when the change exceeds the
            # configured threshold (or the guarantee interval lapses).
            if self.last_send_time == 0:
                send_needed = True
                send_reason = "no send time"
            elif self.value is None:
                send_needed = True
                send_reason = "no value"
            elif abs(self.value - new_value) > self.chg_threshold:
                send_needed = True
                send_reason = "change threshold"
            elif (time.time() - self.last_send_time) > self.guarantee_sec:
                send_needed = True
                send_reason = "guarantee sec"
            elif force_send:
                send_needed = True
                send_reason = "forced"
        if send_needed:
            # Rotate the previous value out before storing the new one.
            self.last_value = self.value
            if self.map_:
                try:
                    self.value = self.map_[new_value]
                except KeyError:
                    # Unmapped raw value: fall back to sending it as-is.
                    log.error("Cannot find a map value for {} in {} for {}".format(new_value, self.map_, self.mesh_name))
                    self.value = new_value
            else:
                self.value = new_value
            self.last_send_time = time.time()
            log.info("Sending {} for {} - {}".format(self.value, self.mesh_name, send_reason))
        return send_needed

    def read(self):
        """Read the value.  Subclasses override this; the base is a no-op."""
        pass
|
||||
|
||||
|
||||
def identity(value):
    """No-op transform: hand back *value* unchanged.

    Used as the default ``transform_fn`` for Modbus channels.
    """
    return value
|
||||
|
||||
|
||||
class ModbusChannel(Channel):
    """Modbus channel object.

    Adds a register address, a register count and an optional value
    transform on top of the base :class:`Channel` behaviour.
    """

    def __init__(self, mesh_name, register_number, data_type, chg_threshold, guarantee_sec, channel_size=1, map_=False, write_enabled=False, transform_fn=identity):
        """Initialize the channel.

        register_number -- Modbus register address to read.
        channel_size    -- number of consecutive registers for this value.
        transform_fn    -- callable applied to every raw read value.
        Remaining parameters are passed through to Channel.__init__.
        """
        super(ModbusChannel, self).__init__(mesh_name, data_type, chg_threshold, guarantee_sec, map_, write_enabled)
        # Channel.__init__ already set mesh_name, data_type, last_value,
        # value, last_send_time, chg_threshold, guarantee_sec, map_ and
        # write_enabled -- re-assigning them here (as the original code
        # did) was redundant and a maintenance hazard.  Only the
        # Modbus-specific attributes are added.
        self.register_number = register_number
        self.channel_size = channel_size
        self.transform_fn = transform_fn

    def read(self, mbsvalue):
        """Return the transformed read value."""
        return self.transform_fn(mbsvalue)
|
||||
|
||||
|
||||
class PLCChannel(Channel):
    """PLC Channel Object.

    A :class:`Channel` that reads its value from an Allen-Bradley PLC
    tag via the module-level ``read_tag`` helper.
    """

    def __init__(self, ip, mesh_name, plc_tag, data_type, chg_threshold, guarantee_sec, map_=False, write_enabled=False, plc_type='CLX'):
        """Initialize the channel.

        ip       -- PLC IP address.
        plc_tag  -- tag name to read on the PLC.
        plc_type -- "CLX" (default) or "Micro800" (direct connection).
        Remaining parameters are passed through to Channel.__init__.
        """
        super(PLCChannel, self).__init__(mesh_name, data_type, chg_threshold, guarantee_sec, map_, write_enabled)
        # Channel.__init__ already set the shared attributes; the
        # original code re-assigned all of them redundantly.  Only the
        # PLC-specific attributes are added here.
        self.plc_ip = ip
        self.plc_tag = plc_tag
        self.plc_type = plc_type

    def read(self):
        """Read the tag from the PLC.

        Returns the tag's value, or None when the channel is not fully
        configured or the read failed (read_tag returns False).
        """
        plc_value = None
        if self.plc_tag and self.plc_ip:
            read_value = read_tag(self.plc_ip, self.plc_tag, plc_type=self.plc_type)
            if read_value:
                # read_value is a (value, type) tuple from pycomm.
                plc_value = read_value[0]
        return plc_value
|
||||
|
||||
|
||||
class BoolArrayChannels(Channel):
    """Hold the configuration for a set of boolean array channels.

    A single integer PLC tag is split into bits (via ``binarray``); each
    bit index listed in ``map_`` becomes a named boolean value.  Here
    ``map_`` is interpreted as {bit_index: mesh_name}, and ``value`` /
    ``last_value`` hold dicts of {mesh_name: '0'|'1'}.
    """

    def __init__(self, ip, mesh_name, plc_tag, data_type, chg_threshold, guarantee_sec, map_=False, write_enabled=False):
        """Initialize the channel.

        ip      -- PLC IP address.
        plc_tag -- integer tag whose bits carry the boolean values.
        map_    -- {bit_index: mesh_name} mapping of bits to publish.
        Remaining parameters are passed through to Channel.__init__.
        """
        super(BoolArrayChannels, self).__init__(mesh_name, data_type, chg_threshold, guarantee_sec, map_, write_enabled)
        # Channel.__init__ already set the shared attributes; only the
        # PLC-specific ones are added (the original re-assigned all of
        # them redundantly).
        self.plc_ip = ip
        self.plc_tag = plc_tag

    def compare_values(self, new_val_dict):
        """Compare new values to old values to see if the values need storing.

        Returns True when any bit differs from ``last_value`` or when no
        comparable previous value exists.
        """
        send = False
        for idx in new_val_dict:
            try:
                if new_val_dict[idx] != self.last_value[idx]:
                    send = True
            except (KeyError, TypeError):
                # KeyError: this bit name was never recorded before.
                # TypeError: last_value is still None (no previous send).
                # Either way the new data must be sent.  (The original
                # caught only KeyError and would crash on None.)
                log.error("Key Error in self.compare_values for index {}".format(idx))
                send = True
        return send

    def read(self, force_send=False):
        """Read the value and check to see if needs to be stored.

        Returns True when a send was triggered (and state updated),
        False otherwise.
        """
        send_needed = False
        send_reason = ""
        if self.plc_tag:
            val = read_tag(self.plc_ip, self.plc_tag)
            if val:
                bool_arr = binarray(val[0])
                new_val = {}
                for idx in self.map_:
                    try:
                        new_val[self.map_[idx]] = bool_arr[idx]
                    except (KeyError, IndexError):
                        # bool_arr is a list, so an out-of-range bit index
                        # raises IndexError; the original caught only
                        # KeyError and would have crashed here.
                        log.error("Not able to get value for index {}".format(idx))

                if self.last_send_time == 0:
                    send_needed = True
                    send_reason = "no send time"
                elif self.value is None:
                    send_needed = True
                    send_reason = "no value"
                elif self.compare_values(new_val):
                    send_needed = True
                    send_reason = "value change"
                elif (time.time() - self.last_send_time) > self.guarantee_sec:
                    send_needed = True
                    send_reason = "guarantee sec"
                elif force_send:
                    send_needed = True
                    send_reason = "forced"

                if send_needed:
                    # Save the outgoing previous value BEFORE overwriting,
                    # matching Channel.check().  The original assigned
                    # self.value first, so last_value never held the
                    # previous value.
                    self.last_value = self.value
                    self.value = new_val
                    self.last_send_time = time.time()
                    log.info("Sending {} for {} - {}".format(self.value, self.mesh_name, send_reason))
        return send_needed
|
||||
49
meshifyDrivers/advvfdipp/Tags.py
Normal file
49
meshifyDrivers/advvfdipp/Tags.py
Normal file
@@ -0,0 +1,49 @@
|
||||
"""Tag table for the advvfdipp driver.

Each entry is a PLCChannel:
    PLCChannel(ip, mesh_name, plc_tag, data_type, chg_threshold, guarantee_sec, plc_type=...)
where chg_threshold is the change needed to trigger a send and
guarantee_sec is the max interval between sends (3600 = hourly,
86400 = daily).
"""
from Channel import PLCChannel, ModbusChannel
from advvfdipp import PLC_IP_ADDRESS

tags = [
    # Process values
    PLCChannel(PLC_IP_ADDRESS, "flowrate","val_Flowmeter","REAL", 300, 3600, plc_type="CLX"),
    PLCChannel(PLC_IP_ADDRESS, "fluidlevel","val_FluidLevel","REAL", 2, 3600, plc_type="CLX"),
    PLCChannel(PLC_IP_ADDRESS, "intakepressure","val_IntakePressure","REAL", 10, 3600, plc_type="CLX"),
    PLCChannel(PLC_IP_ADDRESS, "intaketemperature","val_IntakeTemperature","REAL", 5, 3600, plc_type="CLX"),
    PLCChannel(PLC_IP_ADDRESS, "tubingpressure","val_TubingPressure","REAL", 10, 3600, plc_type="CLX"),
    PLCChannel(PLC_IP_ADDRESS, "pidcontrolmode","sts_PID_Control","BOOL", 1, 3600, plc_type="CLX"),
    PLCChannel(PLC_IP_ADDRESS, "wellstatus","Device_Status_INT","INT", 1, 3600, plc_type="CLX"),
    PLCChannel(PLC_IP_ADDRESS, "vfdfrequency","VFD_SpeedFdbk","REAL", 5, 3600, plc_type="CLX"),
    # Totalizers ([0] = today, [1] = yesterday)
    PLCChannel(PLC_IP_ADDRESS, "flowtotal","Flow_Total[0]","REAL", 100, 3600, plc_type="CLX"),
    PLCChannel(PLC_IP_ADDRESS, "energytotal","Energy_Total[0]","REAL", 10, 3600, plc_type="CLX"),
    PLCChannel(PLC_IP_ADDRESS, "vfdcurrent","VFD_OutCurrent","REAL", 5, 3600, plc_type="CLX"),
    PLCChannel(PLC_IP_ADDRESS, "downholesensorstatus","Downhole_Sensor_Status_INT","INT", 1, 3600, plc_type="CLX"),
    PLCChannel(PLC_IP_ADDRESS, "fluidspecificgravity","cfg_FluidSpecificGravity","REAL", 1, 3600, plc_type="CLX"),
    PLCChannel(PLC_IP_ADDRESS, "flowtotalyesterday","Flow_Total[1]","REAL", 100, 86400, plc_type="CLX"),
    PLCChannel(PLC_IP_ADDRESS, "energytotalyesterday","Energy_Total[1]","REAL", 10, 86400, plc_type="CLX"),
    # Alarms
    PLCChannel(PLC_IP_ADDRESS, "alarmflowrate","alarm_Flowmeter","BOOL", 1, 3600, plc_type="CLX"),
    PLCChannel(PLC_IP_ADDRESS, "alarmintakepressure","alarm_IntakePressure","BOOL", 1, 3600, plc_type="CLX"),
    PLCChannel(PLC_IP_ADDRESS, "alarmintaketemperature","alarm_IntakeTemperature","BOOL", 1, 3600, plc_type="CLX"),
    PLCChannel(PLC_IP_ADDRESS, "alarmtubingpressure","alarm_TubingPressure","BOOL", 1, 3600, plc_type="CLX"),
    PLCChannel(PLC_IP_ADDRESS, "alarmvfd","alarm_VFD","BOOL", 1, 3600, plc_type="CLX"),
    PLCChannel(PLC_IP_ADDRESS, "alarmlockout","alarm_Lockout","BOOL", 1, 3600, plc_type="CLX"),
    # Permissives and commands
    PLCChannel(PLC_IP_ADDRESS, "runpermissive","Run_Permissive_INT","INT", 1, 3600, plc_type="CLX"),
    PLCChannel(PLC_IP_ADDRESS, "startpermissive","Start_Permissive_INT","INT", 1, 3600, plc_type="CLX"),
    PLCChannel(PLC_IP_ADDRESS, "startcommand","cmd_Start","BOOL", 1, 3600, plc_type="CLX"),
    PLCChannel(PLC_IP_ADDRESS, "stopcommand","cmd_Stop","BOOL", 1, 3600, plc_type="CLX"),
    # Setpoints and configuration (daily guarantee)
    PLCChannel(PLC_IP_ADDRESS, "flowsetpoint","cfg_PID_FlowSP","REAL", 1, 86400, plc_type="CLX"),
    PLCChannel(PLC_IP_ADDRESS, "fluidlevelsetpoint","cfg_PID_FluidLevelSP","REAL", 1, 86400, plc_type="CLX"),
    PLCChannel(PLC_IP_ADDRESS, "manualfrequencysetpoint","cfg_PID_ManualSP","REAL", 1, 86400, plc_type="CLX"),
    PLCChannel(PLC_IP_ADDRESS, "tubingpressuresetpoint","cfg_PID_TubingPressureSP","REAL", 1, 86400, plc_type="CLX"),
    PLCChannel(PLC_IP_ADDRESS, "alarmfluidlevel","alarm_FluidLevel","BOOL", 1, 3600, plc_type="CLX"),
    PLCChannel(PLC_IP_ADDRESS, "pressureshutdownlimit","AIn_IntakePressure.Val_LoLim","REAL", 1, 86400, plc_type="CLX"),
    PLCChannel(PLC_IP_ADDRESS, "pressurestartuplimit","AIn_IntakePressure.Val_HiLim","REAL", 1, 86400, plc_type="CLX"),
    PLCChannel(PLC_IP_ADDRESS, "temperatureshutdownlimit","AIn_IntakeTemperature.Val_HiLim","REAL", 1, 86400, plc_type="CLX"),
    PLCChannel(PLC_IP_ADDRESS, "temperaturestartuplimit","AIn_IntakeTemperature.Val_LoLim","REAL", 1, 86400, plc_type="CLX"),
    PLCChannel(PLC_IP_ADDRESS, "sensorheight","cfg_DHSensorDistToIntake","REAL", 1, 86400, plc_type="CLX"),
    # VFD / controller diagnostics
    PLCChannel(PLC_IP_ADDRESS, "last_vfd_fault_code","PowerFlex755.Val_LastFaultCode","INT", 1, 3600, plc_type="CLX"),
    PLCChannel(PLC_IP_ADDRESS, "vfd_fault","sts_CurrentVFDFaultCode","INT", 1, 3600, plc_type="CLX"),
    PLCChannel(PLC_IP_ADDRESS, "controllerfault_io","ControllerFault_IO","BOOL", 1, 3600, plc_type="CLX"),
    PLCChannel(PLC_IP_ADDRESS, "controllerfault_program","ControllerFault_Program","BOOL", 1, 3600, plc_type="CLX"),
    PLCChannel(PLC_IP_ADDRESS, "minvfdfrequency","PowerFlex755.Cfg_MinSpdRef","REAL", 1, 86400, plc_type="CLX"),
    PLCChannel(PLC_IP_ADDRESS, "maxvfdfrequency","PowerFlex755.Cfg_MaxSpdRef","REAL", 1, 86400, plc_type="CLX"),
    # HART flowmeter totalizers
    PLCChannel(PLC_IP_ADDRESS, "hartnettotal","in_HART_Flowmeter_Net","REAL", 100, 3600, plc_type="CLX"),
    PLCChannel(PLC_IP_ADDRESS, "hartfwdtotal","in_HART_Flowmeter_Fwd","REAL", 100, 3600, plc_type="CLX"),
    PLCChannel(PLC_IP_ADDRESS, "hartrevtotal","in_HART_Flowmeter_Rev","REAL", 100, 3600, plc_type="CLX")
]
|
||||
264
meshifyDrivers/advvfdipp/advvfdipp.py
Normal file
264
meshifyDrivers/advvfdipp/advvfdipp.py
Normal file
@@ -0,0 +1,264 @@
|
||||
"""Driver for advvfdipp"""
|
||||
|
||||
import threading
|
||||
import json
|
||||
import os
|
||||
import time
|
||||
from random import randint
|
||||
from datetime import datetime as dt
|
||||
# PERSISTENCE FILE
|
||||
import persistence
|
||||
PERSIST = persistence.load("extra_data.json")
|
||||
if not PERSIST:
|
||||
PERSIST = {'ip_address': '192.168.1.10', 'download_pycomm': True, 'flowmeter_units': 'GPM'}
|
||||
persistence.store(PERSIST, "extra_data.json")
|
||||
os.system('echo "" > /root/python_firmware/drivers/modbusMap.p')
|
||||
PLC_IP_ADDRESS = PERSIST['ip_address']
|
||||
from device_base import deviceBase
|
||||
import urllib
|
||||
if PERSIST['download_pycomm']:
|
||||
try:
|
||||
urllib.urlretrieve('http://s3.amazonaws.com/pocloud-drivers/pycomm/clx.py', '/root/python_firmware/pycomm/ab_comm/clx.py')
|
||||
urllib.urlretrieve('http://s3.amazonaws.com/pocloud-drivers/pycomm/cip_base.py', '/root/python_firmware/pycomm/cip/cip_base.py')
|
||||
PERSIST['download_pycomm'] = False
|
||||
persistence.store(PERSIST, "extra_data.json")
|
||||
except Exception as e:
|
||||
print("Could not download latest pycomm update: {}".format(e))
|
||||
|
||||
from Channel import PLCChannel, ModbusChannel,read_tag, write_tag, TAG_DATAERROR_SLEEPTIME
|
||||
from utilities import get_public_ip_address, get_private_ip_address, get_additional_tags, convert_int
|
||||
from file_logger import filelogger as log
|
||||
from Tags import tags
|
||||
from runtimeStats import RuntimeStats as RTS
|
||||
|
||||
path = "/root/python_firmware/drivers/additional_tags.py"
|
||||
|
||||
f = open(path, "a+")
|
||||
f.seek(0)
|
||||
if os.stat(path).st_size == 0:
|
||||
f.write("from Channel import PLCChannel, ModbusChannel\n")
|
||||
f.write("from advvfdipp import PLC_IP_ADDRESS\n")
|
||||
f.write("additional_tags = []")
|
||||
f.close()
|
||||
|
||||
|
||||
from additional_tags import additional_tags
|
||||
|
||||
_ = None
|
||||
|
||||
log.info("advvfdipp startup")
|
||||
|
||||
# GLOBAL VARIABLES
|
||||
WAIT_FOR_CONNECTION_SECONDS = 30
|
||||
IP_CHECK_PERIOD = 60
|
||||
WATCHDOG_ENABLE = False
|
||||
WATCHDOG_CHECK_PERIOD = 60
|
||||
WATCHDOG_SEND_PERIOD = 3600 # Seconds, the longest amount of time before sending the watchdog status
|
||||
|
||||
CHANNELS = tags + additional_tags
|
||||
|
||||
|
||||
class start(threading.Thread, deviceBase):
    """Start class required by Meshify.

    Daemon thread that polls all configured PLC channels every 10-minute
    boundary, builds a ThingsBoard telemetry payload, and maintains
    runtime statistics, connectivity checks and an optional PLC watchdog.
    """

    def __init__(self, name=None, number=None, mac=None, Q=None, mcu=None,
                 companyId=None, offset=None, mqtt=None, Nodes=None):
        """Initialize the driver."""
        threading.Thread.__init__(self)
        deviceBase.__init__(self, name=name, number=number, mac=mac, Q=Q,
                            mcu=mcu, companyId=companyId, offset=offset,
                            mqtt=mqtt, Nodes=Nodes)

        self.daemon = True
        # NOTE(review): version "18" here vs releaseVersion "19" in
        # config.txt — confirm which is current.
        self.version = "18"
        self.finished = threading.Event()
        self.force_send = False                  # set by advvfdipp_sync to force a full report
        self.public_ip_address = ""
        self.public_ip_address_last_checked = 0
        self.watchdog = False
        self.watchdog_last_checked = 0
        self.watchdog_last_sent = 0
        self.ping_counter = 0                    # consecutive failed pings to 8.8.8.8
        self.rts = RTS()                         # runtime statistics tracker
        self.rts.loadDataFromFile()
        self.rts.saveDataToFile()
        self.rts.manageTime()
        self.today = dt.now().date()
        threading.Thread.start(self)

    # this is a required function for all drivers, its goal is to upload some piece of data
    # about your device so it can be seen on the web
    def register(self):
        """Register the driver."""
        # self.sendtodb("log", "BOOM! Booted.", 0)
        pass

    def run(self):
        """Actually run the driver.

        Waits for the network, then loops forever: every 600-second
        boundary (or when force_send is set) reads all CHANNELS, converts
        status integers to strings, updates run-time stats, and publishes
        one telemetry payload to ThingsBoard.
        """
        for i in range(0, WAIT_FOR_CONNECTION_SECONDS):
            print("advvfdipp driver will start in {} seconds".format(WAIT_FOR_CONNECTION_SECONDS - i))
            time.sleep(1)
        log.info("BOOM! Starting advvfdipp driver...")

        #self._check_watchdog()
        self._check_ip_address()

        # Register this driver so it can receive sets from the platform.
        self.nodes["advvfdipp_0199"] = self
        # Report the configured flow units, defaulting to GPM when unset
        # or when the persisted settings are malformed.
        try:
            if PERSIST['flowmeter_units']:
                self.sendtodbDev(1, 'flowunits', PERSIST['flowmeter_units'], 0, 'advvfdipp')
            else:
                PERSIST['flowmeter_units'] = "GPM"
                persistence.store(PERSIST, "extra_data.json")
                self.sendtodbDev(1, 'flowunits', PERSIST['flowmeter_units'], 0, 'advvfdipp')
        except:
            # NOTE(review): bare except keeps startup alive on any
            # persistence problem, but also hides real bugs.
            PERSIST['flowmeter_units'] = "GPM"
            persistence.store(PERSIST, "extra_data.json")
            self.sendtodbDev(1, 'flowunits', PERSIST['flowmeter_units'], 0, 'advvfdipp')
        send_loops = 0
        # PLC tags whose raw INT/BOOL values get mapped to display strings
        # via utilities.convert_int before publishing.
        convert_list = ["Device_Status_INT","sts_PID_Control","Downhole_Sensor_Status_INT","alarm_Flowmeter","alarm_IntakePressure",
                        "alarm_IntakeTemperature","alarm_TubingPressure","alarm_VFD","alarm_Lockout","alarm_FluidLevel","Run_Permissive_INT",
                        "Start_Permissive_INT","PowerFlex755.Val_LastFaultCode","sts_CurrentVFDFaultCode"]
        while True:
            now = time.time()
            if self.force_send:
                log.warning("FORCE SEND: TRUE")
            # Poll on 600-second wall-clock boundaries, or immediately on
            # a forced send.
            if int(time.time()) % 600 == 0 or self.force_send:
                if self.force_send:
                    payload = {"ts": time.time()*1000, "values": {}}
                else:
                    # Round the timestamp to the 10-minute boundary (ms).
                    payload = {"ts": round(time.time()/600)*600*1000, "values": {}}
                for chan in CHANNELS:
                    try:
                        val = chan.read()
                        # HART totals report 0.0 instead of None so the
                        # series stays numeric.
                        if "hart" in chan.mesh_name and val == None:
                            val = 0.0

                        if chan.plc_tag in convert_list:
                            converted_value = convert_int(chan.plc_tag, val)
                            payload["values"][chan.mesh_name] = converted_value
                            if chan.mesh_name == "wellstatus":
                                # Track run start/stop transitions for the
                                # runtime-percentage statistic.
                                if converted_value == "Running" and not self.rts.runs[self.rts.todayString]["run_" + str(self.rts.currentRun)]["start"]:
                                    self.rts.startRun()
                                    self.rts.saveDataToFile()
                                elif self.rts.runs[self.rts.todayString]["run_" + str(self.rts.currentRun)]["start"] and not self.rts.runs[self.rts.todayString]["run_" + str(self.rts.currentRun)]["end"]:
                                    self.rts.endRun()
                                    self.rts.saveDataToFile()
                                payload["values"]["percentRunTime30Days"] = self.rts.calculateRunPercentMultiDay()
                        elif chan.mesh_name == "vfdfrequency":
                            # Only non-zero frequencies count toward the
                            # 30-day average.
                            if val > 0:
                                self.rts.addHertzDataPoint(val)
                                self.rts.saveDataToFile()
                            payload["values"][chan.mesh_name] = val
                            payload["values"]["avgFrequency30Days"] = self.rts.calculateAverageHertzMultiDay()
                        else:
                            payload["values"][chan.mesh_name] = val
                    except Exception as e:
                        # One bad channel must not abort the whole poll.
                        log.info("Error: {}".format(e))
                try:
                    self.rts.manageTime()
                    # Re-publish the 30-day aggregates once per day.
                    if dt.now().date() != self.today:
                        payload["values"]["avgFrequency30Days"] = self.rts.calculateAverageHertzMultiDay()
                        payload["values"]["percentRunTime30Days"] = self.rts.calculateRunPercentMultiDay()
                        self.today = dt.now().date()
                except Exception as e:
                    log.error("Error: {}".format(e))
                self._check_ip_address()
                payload["values"]["public_ip_address"] = self.public_ip_address
                payload["values"]["private_ip_address"] = self.private_ip_address
                self.sendToTB(json.dumps(payload))
                self.sendToTBAttributes(json.dumps({"latestReportTime": round(time.time()/600)*600*1000}))
                # NOTE(review): comment says 30 seconds but this sleeps 10;
                # it also prevents re-triggering within the same boundary
                # second.
                time.sleep(10) #sleep for 30 seconds after a full poll
                # print("advvfdipp driver still alive...")
                # A forced send stays active for 3 loops, then clears.
                if self.force_send:
                    if send_loops > 2:
                        log.warning("Turning off force_send")
                        self.force_send = False
                        send_loops = 0
                    else:
                        send_loops += 1

            if WATCHDOG_ENABLE:
                if (now - self.watchdog_last_checked) > WATCHDOG_CHECK_PERIOD:
                    self._check_watchdog()

            if (now - self.public_ip_address_last_checked) > IP_CHECK_PERIOD:
                self._check_ip_address()

    def _check_watchdog(self):
        """Check the watchdog and send to Meshify if changed or stale."""
        test_watchdog = self.advvfdipp_watchdog()
        now = time.time()
        self.watchdog_last_checked = now
        if test_watchdog != self.watchdog or (now - self.watchdog_last_sent) > WATCHDOG_SEND_PERIOD:
            self.sendtodbDev(1, 'watchdog', test_watchdog, 0, 'advvfdipp')
            self.watchdog = test_watchdog
            self.watchdog_last_sent = now

    def _check_ip_address(self):
        """Check the public IP address and send to Meshify if changed.

        Also pings 8.8.8.8; after three consecutive failures the gateway
        reboots itself.
        """
        self.public_ip_address_last_checked = time.time()
        test_public_ip = get_public_ip_address()
        test_public_ip = test_public_ip
        test_private_ip = get_private_ip_address()
        if not test_public_ip == self.public_ip_address and not test_public_ip == "0.0.0.0":
            #self.sendtodbDev(1, 'public_ip_address', test_public_ip, 0, 'tankalarms')
            self.public_ip_address = test_public_ip
        # NOTE(review): self.private_ip_address is never initialized in
        # __init__, so the first comparison below relies on it being set
        # elsewhere — verify this cannot raise AttributeError.
        if not test_private_ip == self.private_ip_address:
            #self.sendtodbDev(1, 'private_ip_address', test_private_ip, 0, 'tankalarms')
            self.private_ip_address = test_private_ip
        hostname = "8.8.8.8"
        response = 1
        try:
            response = os.system("ping -c 1 " + hostname + " > /dev/null 2>&1")
        except Exception as e:
            print("Something went wrong in ping: {}".format(e))

        #and then check the response...
        if response == 0:
            print(hostname, 'is up!')
            self.ping_counter = 0
        else:
            print(hostname, 'is down!')
            self.ping_counter += 1

        if self.ping_counter >= 3:
            print("Rebooting because no internet detected")
            os.system('reboot')

    def advvfdipp_watchdog(self):
        """Write a random integer to the PLC and then 1 seconds later check that it has been decremented by 1."""
        randval = randint(0, 32767)
        write_tag(str(PLC_IP_ADDRESS), 'watchdog_INT', randval, plc_type="CLX")
        time.sleep(1)
        watchdog_val = read_tag(str(PLC_IP_ADDRESS), 'watchdog_INT', plc_type="CLX")
        try:
            # watchdog_val is a (value, type) tuple; a healthy PLC program
            # decrements the written value by one.
            return (randval - 1) == watchdog_val[0]
        except (KeyError, TypeError):
            # read_tag returned False (or malformed data): watchdog fails.
            return False

    def advvfdipp_sync(self, name, value):
        """Sync all data from the driver (platform set-handler)."""
        self.force_send = True
        # self.sendtodb("log", "synced", 0)
        return True

    def advvfdipp_writeplctag(self, name, value):
        """Write a value to the PLC.

        *value* is a JSON-ish string like "{'tag': 'cmd_Start', 'val': 1}"
        (single quotes are normalized before parsing).
        """
        new_val = json.loads(str(value).replace("'", '"'))
        tag_n = str(new_val['tag'])  # "cmd_Start"
        val_n = new_val['val']
        write_res = write_tag(str(PLC_IP_ADDRESS), tag_n, val_n, plc_type="CLX")
        print("Result of advvfdipp_writeplctag(self, {}, {}) = {}".format(name, value, write_res))
        if write_res is None:
            write_res = "Error writing to PLC..."
        return write_res

    def advvfdipp_flowunits(self, name, value):
        """Persist and re-publish the flowmeter units (platform set-handler)."""
        new_val = json.loads(str(value).replace("'", '"'))
        PERSIST['flowmeter_units'] = new_val
        persistence.store(PERSIST, "extra_data.json")
        self.sendtodbDev(1, 'flowunits', PERSIST['flowmeter_units'], 0, 'advvfdipp')
|
||||
15
meshifyDrivers/advvfdipp/config.txt
Normal file
15
meshifyDrivers/advvfdipp/config.txt
Normal file
@@ -0,0 +1,15 @@
|
||||
{
|
||||
"files": {
|
||||
"file3": "file_logger.py",
|
||||
"file2": "Channel.py",
|
||||
"file1": "advvfdipp.py",
|
||||
"file6": "persistence.py",
|
||||
"file5": "utilities.py",
|
||||
"file4": "Tags.py",
|
||||
"file5": "runtimeStats.py"
|
||||
},
|
||||
"deviceName": "advvfdipp",
|
||||
"releaseVersion": "19",
|
||||
"driverFileName": "advvfdipp.py",
|
||||
"driverId": "0100"
|
||||
}
|
||||
205
meshifyDrivers/advvfdipp/device_base.py
Normal file
205
meshifyDrivers/advvfdipp/device_base.py
Normal file
@@ -0,0 +1,205 @@
|
||||
import types
|
||||
import traceback
|
||||
import binascii
|
||||
import threading
|
||||
import time
|
||||
import thread
|
||||
import os
|
||||
import struct
|
||||
import sys
|
||||
import textwrap
|
||||
|
||||
class deviceBase():
|
||||
|
||||
def __init__(self, name=None, number=None, mac=None, Q=None, mcu=None, companyId=None, offset=None, mqtt=None, Nodes=None):
|
||||
self.offset = offset
|
||||
self.company = companyId
|
||||
self.name = name
|
||||
self.number = number
|
||||
self.q = Q
|
||||
self.deviceName = name + '_[' + mac + ':' + number[0:2] + ':' + number[2:] + ']!'
|
||||
self.chName = "M1" + '_[' + mac + ':'
|
||||
self.chName2 = '_[' + mac + ':'
|
||||
print 'device name is:'
|
||||
print self.deviceName
|
||||
mac2 = mac.replace(":", "")
|
||||
self.mac = mac2.upper()
|
||||
self.address = 1
|
||||
self.debug = True
|
||||
self.mcu = mcu
|
||||
self.firstRun = True
|
||||
self.mqtt = mqtt
|
||||
self.nodes = Nodes
|
||||
#local dictionary of derived nodes ex: localNodes[tank_0199] = self
|
||||
self.localNodes = {}
|
||||
os.system("chmod 777 /root/reboot")
|
||||
os.system("echo nameserver 8.8.8.8 > /etc/resolv.conf")
|
||||
|
||||
|
||||
def sendtodbLoc(self, ch, channel, value, timestamp, deviceName, mac):
|
||||
|
||||
|
||||
#this will add your derived nodes the master nodes list, allowing them to receive sets!!
|
||||
localNodesName = deviceName + "_" + str(ch) + "99"
|
||||
|
||||
if not self.localNodes.has_key(localNodesName):
|
||||
self.localNodes[localNodesName] = True
|
||||
self.nodes[localNodesName] = self
|
||||
|
||||
#make the techname
|
||||
lst = textwrap.wrap(str(mac), width=2)
|
||||
tech = ""
|
||||
for i in range(len(lst)):
|
||||
tech += lst[i].lower() + ":"
|
||||
|
||||
|
||||
chName2 = '_[' + tech
|
||||
|
||||
if int(ch) < 10:
|
||||
ch = "0" + str(int(ch))
|
||||
|
||||
if len(ch) > 2:
|
||||
ch = ch[:-2]
|
||||
|
||||
dname = deviceName + chName2 + str(ch) + ":98]!"
|
||||
|
||||
if int(timestamp) == 0:
|
||||
timestamp = self.getTime()
|
||||
|
||||
topic = 'meshify/db/%s/%s/%s/%s' % (self.company, mac, dname, channel)
|
||||
print topic
|
||||
msg = """[ { "value":"%s", "timestamp":"%s" } ]""" % (str(value), str(timestamp))
|
||||
print msg
|
||||
self.q.put([topic, msg, 0])
|
||||
|
||||
def sendtodbDevJSON(self, ch, channel, value, timestamp, deviceName):
|
||||
|
||||
if int(ch) < 10:
|
||||
ch = "0" + str(int(ch))
|
||||
dname = deviceName + self.chName2 + str(ch) + ":99]!"
|
||||
if int(timestamp) == 0:
|
||||
timestamp = self.getTime()
|
||||
|
||||
topic = 'meshify/db/%s/%s/%s/%s' % (self.company, self.mac, dname, channel)
|
||||
print topic
|
||||
msg = """[ { "value":%s, "timestamp":"%s" } ]""" % (str(value), str(timestamp))
|
||||
print msg
|
||||
self.q.put([topic, msg, 0])
|
||||
|
||||
def sendtodbLora(self, ch, channel, value, timestamp, deviceName):
|
||||
|
||||
if ":" not in ch:
|
||||
ch = ch[0:2] + ":" + ch[2:4]
|
||||
|
||||
#this will add your derived nodes the master nodes list, allowing them to receive sets!!
|
||||
localNodesName = deviceName + "_" + str(ch).replace(':', "")
|
||||
|
||||
if not self.localNodes.has_key(localNodesName):
|
||||
self.localNodes[localNodesName] = True
|
||||
self.nodes[localNodesName] = self
|
||||
|
||||
|
||||
|
||||
dname = deviceName + self.chName2 + str(ch) + "]!"
|
||||
|
||||
|
||||
|
||||
if int(timestamp) == 0:
|
||||
timestamp = self.getTime()
|
||||
|
||||
topic = 'meshify/db/%s/%s/%s/%s' % (self.company, self.mac, dname, channel)
|
||||
print topic
|
||||
msg = """[ { "value":"%s", "timestamp":"%s" } ]""" % (str(value), str(timestamp))
|
||||
print msg
|
||||
self.q.put([topic, msg, 0])
|
||||
|
||||
def sendtodbDev(self, ch, channel, value, timestamp, deviceName):
|
||||
|
||||
|
||||
#this will add your derived nodes the master nodes list, allowing them to receive sets!!
|
||||
localNodesName = deviceName + "_" + str(ch) + "99"
|
||||
|
||||
if not self.localNodes.has_key(localNodesName):
|
||||
self.localNodes[localNodesName] = True
|
||||
self.nodes[localNodesName] = self
|
||||
|
||||
if int(ch) < 10:
|
||||
ch = "0" + str(int(ch))
|
||||
|
||||
dname = deviceName + self.chName2 + str(ch) + ":99]!"
|
||||
|
||||
|
||||
|
||||
if int(timestamp) == 0:
|
||||
timestamp = self.getTime()
|
||||
|
||||
topic = 'meshify/db/%s/%s/%s/%s' % (self.company, self.mac, dname, channel)
|
||||
print topic
|
||||
msg = """[ { "value":"%s", "timestamp":"%s" } ]""" % (str(value), str(timestamp))
|
||||
print msg
|
||||
self.q.put([topic, msg, 0])
|
||||
|
||||
def sendToTB(self, payload):
    """Queue a telemetry payload for the ThingsBoard MQTT topic."""
    tb_topic = 'v1/devices/me/telemetry'
    print(tb_topic, payload)
    self.q.put([tb_topic, payload, 0])
|
||||
|
||||
def sendToTBAttributes(self, payload):
    """Queue a device-attributes payload for the ThingsBoard MQTT topic."""
    tb_topic = 'v1/devices/me/attributes'
    print(tb_topic, payload)
    self.q.put([tb_topic, payload, 0])
|
||||
|
||||
def sendtodbCH(self, ch, channel, value, timestamp):
    """Publish a reading for channel pair <ch>:99 of this device.

    ch        -- channel id; single digits are zero-padded ("5" -> "05")
    channel   -- meshify datapoint name
    value     -- reading to publish (sent quoted)
    timestamp -- epoch seconds; 0 means "stamp with current time"
    """
    if int(ch) < 10:
        ch = "0" + str(ch)

    dname = self.chName + str(ch) + ":99]!"

    if int(timestamp) == 0:
        timestamp = self.getTime()

    topic = 'meshify/db/%s/%s/%s/%s' % (self.company, self.mac, dname, channel)
    print(topic)  # was a Py2 print statement
    msg = """[ { "value":"%s", "timestamp":"%s" } ]""" % (str(value), str(timestamp))
    print(msg)
    self.q.put([topic, msg, 0])
|
||||
|
||||
def sendtodb(self, channel, value, timestamp):
    """Publish a reading for this device itself (value sent quoted).

    channel   -- meshify datapoint name
    value     -- reading to publish
    timestamp -- epoch seconds; 0 means "stamp with current time"
    """
    if int(timestamp) == 0:
        timestamp = self.getTime()
    # Normalise to int: getTime() returns a string and callers pass either.
    # (Previously a *string* timestamp was compared to an int, which never
    # rejected bad values under Python 2 and raises TypeError under Python 3.)
    timestamp = int(timestamp)
    if timestamp < 1400499858:  # sanity cutoff (~2014-05-19): reject bogus clocks
        return
    else:
        # NOTE(review): getTime() already adds self.offset, so a zero input
        # timestamp gets the offset applied twice here -- confirm intended.
        timestamp = str(timestamp + int(self.offset))

    topic = 'meshify/db/%s/%s/%s/%s' % (self.company, self.mac, self.deviceName, channel)
    print(topic)  # was a Py2 print statement
    msg = """[ { "value":"%s", "timestamp":"%s" } ]""" % (str(value), str(timestamp))
    print(msg)
    self.q.put([topic, msg, 0])
|
||||
|
||||
def sendtodbJSON(self, channel, value, timestamp):
    """Publish a reading with the value embedded as raw JSON (unquoted).

    Identical to sendtodb() except the value is NOT wrapped in quotes, so
    callers can pass numbers or pre-serialised JSON fragments.
    """
    if int(timestamp) == 0:
        timestamp = self.getTime()
    # Normalise to int: getTime() returns a string and callers pass either.
    # (Previously a *string* timestamp was compared to an int, which never
    # rejected bad values under Python 2 and raises TypeError under Python 3.)
    timestamp = int(timestamp)
    if timestamp < 1400499858:  # sanity cutoff (~2014-05-19): reject bogus clocks
        return
    else:
        # NOTE(review): getTime() already adds self.offset, so a zero input
        # timestamp gets the offset applied twice here -- confirm intended.
        timestamp = str(timestamp + int(self.offset))

    topic = 'meshify/db/%s/%s/%s/%s' % (self.company, self.mac, self.deviceName, channel)
    print(topic)  # was a Py2 print statement
    msg = """[ { "value":%s, "timestamp":"%s" } ]""" % (str(value), str(timestamp))
    print(msg)
    self.q.put([topic, msg, 0])
|
||||
def getTime(self):
    """Return current epoch seconds shifted by self.offset, as a string."""
    shifted = time.time() + int(self.offset)
    return str(int(shifted))
|
||||
|
||||
|
||||
|
||||
|
||||
14
meshifyDrivers/advvfdipp/driverConfig.json
Normal file
14
meshifyDrivers/advvfdipp/driverConfig.json
Normal file
@@ -0,0 +1,14 @@
|
||||
{
|
||||
"name": "advvfdipp",
|
||||
"driverFilename": "advvfdipp.py",
|
||||
"driverId": "0000",
|
||||
"additionalDriverFiles": [
|
||||
"utilities.py",
|
||||
"persistence.py",
|
||||
"Channel.py",
|
||||
"logger.py",
|
||||
"Tags.py"
|
||||
],
|
||||
"version": 1,
|
||||
"s3BucketName": "advvfdipp"
|
||||
}
|
||||
18
meshifyDrivers/advvfdipp/file_logger.py
Normal file
18
meshifyDrivers/advvfdipp/file_logger.py
Normal file
@@ -0,0 +1,18 @@
|
||||
"""Logging setup for advvfdipp"""
|
||||
import logging
|
||||
from logging.handlers import RotatingFileHandler
|
||||
import sys
|
||||
|
||||
log_formatter = logging.Formatter('%(asctime)s %(levelname)s %(funcName)s(%(lineno)d) %(message)s')
|
||||
log_file = './advvfdipp.log'
|
||||
my_handler = RotatingFileHandler(log_file, mode='a', maxBytes=500*1024,
|
||||
backupCount=2, encoding=None, delay=0)
|
||||
my_handler.setFormatter(log_formatter)
|
||||
my_handler.setLevel(logging.INFO)
|
||||
filelogger = logging.getLogger('advvfdipp')
|
||||
filelogger.setLevel(logging.INFO)
|
||||
filelogger.addHandler(my_handler)
|
||||
|
||||
console_out = logging.StreamHandler(sys.stdout)
|
||||
console_out.setFormatter(log_formatter)
|
||||
filelogger.addHandler(console_out)
|
||||
21
meshifyDrivers/advvfdipp/persistence.py
Normal file
21
meshifyDrivers/advvfdipp/persistence.py
Normal file
@@ -0,0 +1,21 @@
|
||||
"""Data persistance functions."""
|
||||
# if more advanced persistence is needed, use a sqlite database
|
||||
import json
|
||||
|
||||
|
||||
def load(filename="persist.json"):
|
||||
"""Load persisted settings from the specified file."""
|
||||
try:
|
||||
with open(filename, 'r') as persist_file:
|
||||
return json.load(persist_file)
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
|
||||
def store(persist_obj, filename="persist.json"):
    """Write *persist_obj* as indented JSON to *filename*; False on failure."""
    try:
        with open(filename, 'w') as fh:
            result = json.dump(persist_obj, fh, indent=4)
    except Exception:
        return False
    # json.dump returns None, so success yields None (kept for compatibility).
    return result
|
||||
172
meshifyDrivers/advvfdipp/runtimeStats.py
Normal file
172
meshifyDrivers/advvfdipp/runtimeStats.py
Normal file
@@ -0,0 +1,172 @@
|
||||
from datetime import datetime as dt
|
||||
import time
|
||||
import json
|
||||
import math
|
||||
|
||||
class RuntimeStats:
    """Accumulate pump run sessions and VFD frequency samples per calendar day.

    State layout: self.runs maps a "YYYY-MM-DD" string to a dict of runs,
    each keyed "run_<n>" with {"start": epoch, "end": epoch, "frequencies": [Hz]}.
    A start/end of 0 means "not started"/"still running".  Roughly 30 days of
    history are retained (see manageTime).
    """

    def __init__(self):
        # Per-day run records; see class docstring for the layout.
        self.runs = {}
        # Index of the run currently being recorded within today's dict.
        self.currentRun = 0
        # Cached datetime for "today" and its "YYYY-MM-DD" string form.
        self.today = ""
        self.todayString = ""

    def manageTime(self):
        """Roll state over to a new calendar day if needed and prune old days."""
        if self.todayString != dt.strftime(dt.today(), "%Y-%m-%d"):
            # If a run was still open when the day changed, close it at the
            # final second of that day so per-day totals stay bounded.
            if self.runs[self.todayString]["run_" + str(self.currentRun)]["start"] and not self.runs[self.todayString]["run_" + str(self.currentRun)]["end"]:
                self.runs[self.todayString]["run_" + str(self.currentRun)]["end"] = time.mktime(dt.strptime(self.todayString + " 23:59:59", "%Y-%m-%d %H:%M:%S").timetuple())
            self.addDay()
        self.today = dt.today()
        self.todayString = dt.strftime(self.today, "%Y-%m-%d")
        days = list(self.runs.keys())
        days.sort()
        # Drop the oldest day until the retained span is at most 30 days wide.
        while (dt.strptime(days[-1],"%Y-%m-%d") - dt.strptime(days[0], "%Y-%m-%d")).days > 30:
            self.removeDay(day=days[0])
            days = list(self.runs.keys())
            days.sort()

    def addHertzDataPoint(self, frequency):
        """Record one VFD frequency sample (Hz) against the current run."""
        if frequency > 0:
            self.manageTime()
            try:
                self.runs[self.todayString]["run_" + str(self.currentRun)]["frequencies"].append(frequency)
            except:
                # NOTE(review): bare except -- intended to create the list the
                # first time, but it also hides unrelated KeyErrors.
                self.runs[self.todayString]["run_" + str(self.currentRun)]["frequencies"] = [frequency]

    def startRun(self):
        """Mark the start of a run; closes a dangling open run first."""
        if self.checkRunning():
            self.endRun()
        self.runs[self.todayString]["run_" + str(self.currentRun)]["start"] = time.time()

    def endRun(self):
        """Stamp the current run's end and open a fresh empty run record."""
        self.runs[self.todayString]["run_" + str(self.currentRun)]["end"] = time.time()
        self.currentRun += 1
        self.runs[self.todayString]["run_" + str(self.currentRun)] = {"start":0, "end": 0, "frequencies":[]}

    def checkRunning(self):
        """Return True when the current run has started but not yet ended."""
        if self.runs[self.todayString]["run_" + str(self.currentRun)]["start"] and not self.runs[self.todayString]["run_" + str(self.currentRun)]["end"]:
            return True
        return False

    def addDay(self):
        """Reset today's pointers and create the day's first empty run record."""
        self.today = dt.today()
        self.todayString = dt.strftime(self.today, "%Y-%m-%d")
        self.currentRun = 1
        self.runs[self.todayString] = {}
        self.runs[self.todayString]["run_" + str(self.currentRun)] = {"start":0, "end": 0, "frequencies":[]}

    def countRunsDay(self, day=None):
        """Number of run records for *day* (defaults to today)."""
        if not day:
            day = self.todayString
        return len(self.runs[day].keys())

    def countRunsMultiDay(self, numDays=30):
        """Total run records across retained history.

        NOTE(review): numDays is accepted but never used -- this always sums
        every retained day; confirm whether a window filter was intended.
        """
        total_runs = 0
        for day in list(self.runs.keys()):
            total_runs += self.countRunsDay(day=day)
        return total_runs

    def calculateAverageHertzDay(self, day=None, returnArray=False):
        """Mean of a day's frequency samples, or the raw list if returnArray.

        NOTE(review): raises ZeroDivisionError when the day has no samples.
        """
        dayFrequencies = []
        if not day:
            day = self.todayString
        for run in list(self.runs[day].keys()):
            try:
                dayFrequencies += self.runs[day][run]["frequencies"]
            except Exception as e:
                print("{} missing frequency data for {}".format(day,run))
        if returnArray:
            return dayFrequencies
        return round(math.fsum(dayFrequencies)/len(dayFrequencies),2)

    def calculateAverageHertzMultiDay(self, numDays=30):
        """Mean frequency over completed days in the numDays window (0 if none)."""
        self.manageTime()
        frequencies = []
        for day in list(self.runs.keys()):
            # Skip today (still accumulating) and days outside the window.
            if not day == self.todayString and (dt.strptime(self.todayString, "%Y-%m-%d") - dt.strptime(day, "%Y-%m-%d")).days <= numDays:
                try:
                    frequencies += self.calculateAverageHertzDay(day=day, returnArray=True)
                except Exception as e:
                    print("{} missing frequency data".format(day))
        if len(frequencies):
            return round(math.fsum(frequencies)/len(frequencies), 2)
        return 0

    def calculateRunTimeDay(self, day=None, convertToHours=True):
        """Summed run duration for *day*, in hours (default) or seconds."""
        total_time = 0
        if not day:
            day = self.todayString
        for run in list(self.runs[day].keys()):
            # NOTE(review): a still-open run (end == 0) contributes a
            # negative span here -- confirm runs are closed before summing.
            total_time = self.runs[day][run]["end"] - self.runs[day][run]["start"] + total_time
        if convertToHours:
            return self.convertSecondstoHours(total_time)
        return total_time

    def calculateRunTimeMultiDay(self, numDays=30, convertToHours=True):
        """Summed run duration across completed days within the window."""
        total_time = 0
        for day in list(self.runs.keys()):
            if not day == self.todayString and (dt.strptime(self.todayString, "%Y-%m-%d") - dt.strptime(day, "%Y-%m-%d")).days <= numDays:
                total_time += self.calculateRunTimeDay(day=day, convertToHours=False)
        if convertToHours:
            return self.convertSecondstoHours(total_time)
        return total_time

    def calculateRunPercentDay(self, day=None, precise=False):
        """Percentage of a 24 h day spent running (rounded unless precise)."""
        if not day:
            day = self.todayString
        if precise:
            return (self.calculateRunTimeDay(day=day)/24) * 100
        return round((self.calculateRunTimeDay(day=day)/24) * 100, 2)

    def calculateRunPercentMultiDay(self, numDays=30, precise=False):
        """Percentage of the numDays window spent running."""
        self.manageTime()
        if precise:
            return (self.calculateRunTimeMultiDay()/(24*numDays)) * 100
        return round((self.calculateRunTimeMultiDay()/(24*numDays)) * 100,2)

    def removeDay(self, day=None):
        """Delete one day's records; *day* is required."""
        if not day:
            raise Exception("Day can not be None")
        print("removing day {}".format(day))
        del self.runs[day]

    def convertSecondstoHours(self, seconds):
        """Convert seconds to hours, rounded to 2 decimal places."""
        return round(seconds / (60*60),2)

    def loadDataFromFile(self, filePath="./runtimestats.json"):
        """Restore state from *filePath*; create a fresh file when missing/bad."""
        try:
            with open(filePath, "r") as f:
                temp = json.load(f)
                self.runs = temp["data"]
                self.currentRun = temp["current_run"]
                self.today = dt.strptime(temp["current_day"], "%Y-%m-%d")
                self.todayString = temp["current_day"]
                self.manageTime()
        except:
            # NOTE(review): bare except -- also hides JSON/schema errors, not
            # just a missing file.
            print("Could not find file at {}".format(filePath))
            print("creating file")
            self.addDay()
            try:
                with open(filePath, "w") as f:
                    d = {
                        "current_run": self.currentRun,
                        "current_day": self.todayString,
                        "data": self.runs
                    }
                    json.dump(d, f, indent=4)
            except Exception as e:
                print(e)

    def saveDataToFile(self, filePath="./runtimestats.json"):
        """Persist current state as indented JSON to *filePath*."""
        try:
            print("Saving Runs")
            with open(filePath, "w") as f:
                d = {
                    "current_run": self.currentRun,
                    "current_day": self.todayString,
                    "data": self.runs
                }
                json.dump(d, f, indent=4)
        except Exception as e:
            print(e)
|
||||
240
meshifyDrivers/advvfdipp/utilities.py
Normal file
240
meshifyDrivers/advvfdipp/utilities.py
Normal file
@@ -0,0 +1,240 @@
|
||||
"""Utility functions for the driver."""
|
||||
import socket
|
||||
import struct
|
||||
from Channel import PLCChannel
|
||||
import urllib
|
||||
import contextlib
|
||||
|
||||
def get_private_ip_address():
    """Find the private IP Address of the host device.

    Returns the address as a string on success; on failure the raised
    exception object is returned (existing callers rely on that value,
    so the odd contract is kept).
    """
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    try:
        # A UDP connect sends no packets; it only selects the outbound
        # interface so getsockname() reports the local address.
        sock.connect(("8.8.8.8", 80))
        ip_address = sock.getsockname()[0]
    except Exception as e:
        return e
    finally:
        sock.close()  # fix: the socket previously leaked on the error path
    return ip_address
|
||||
|
||||
def get_public_ip_address():
    """Return this host's public IP via checkip.amazonaws.com.

    Falls back to "0.0.0.0" when the lookup fails; on success the trailing
    newline returned by the service is stripped.
    """
    result = "0.0.0.0"
    try:
        with contextlib.closing(urllib.urlopen("http://checkip.amazonaws.com")) as url:
            result = url.read()
    except Exception as e:
        print("could not resolve check IP: {}".format(e))
        return result
    return result[:-1]
|
||||
|
||||
def int_to_float16(int_to_convert):
    """Decode an unsigned 16-bit integer as the IEEE-754 half float it encodes."""
    bits = ('0' * 16 + '{0:b}'.format(int_to_convert))[-16:]
    sign = -1.0 if bits[0] == '1' else 1.0
    exponent = float(int(bits[1:6], 2))
    fraction = float(int(bits[6:16], 2))

    if exponent == 0.0:
        # Subnormal: no implicit leading 1, exponent fixed at -14.
        return sign * 2 ** -14 * fraction / (2.0 ** 10.0)
    if exponent == float(0b11111):
        # All-ones exponent encodes infinity (fraction 0) or NaN.
        if fraction == 0:
            return sign * float("inf")
        return float("NaN")
    # Normal number: implicit leading 1, biased exponent.
    return sign * (2 ** (exponent - 15)) * (1.0 + fraction / (2.0 ** 10.0))
|
||||
|
||||
|
||||
def ints_to_float(int1, int2):
    """Combine two 16-bit registers (high word first) into an IEEE-754 float."""
    raw = struct.pack('>HH', int1, int2)
    (result,) = struct.unpack('>f', raw)
    print("[{}, {}] >> {}".format(int1, int2, result))
    return result
|
||||
|
||||
|
||||
def degf_to_degc(temp_f):
    """Convert a Fahrenheit temperature to Celsius."""
    scale = 5.0 / 9.0
    return (temp_f - 32.0) * scale
|
||||
|
||||
|
||||
def degc_to_degf(temp_c):
    """Convert a Celsius temperature to Fahrenheit."""
    fahrenheit = temp_c * 1.8 + 32.0
    return fahrenheit
|
||||
|
||||
def get_additional_tags(tag_dict):
    """Build a PLCChannel for each entry in tag_dict['additional_tags'].

    Entries with missing keys (or any channel-construction failure) are
    skipped with a console note; returns the list of channels built.
    """
    tags_array = tag_dict['additional_tags']
    channel_array = []
    for x in tags_array:
        try:
            print("Making channel {}".format(x))  # was a Py2 print statement
            channel_array.append(PLCChannel(tag_dict['ip_address'], x['mesh_name'], x['plc_tag'], x['data_type'], x['chg_threshold'], x['guarantee_sec'], plc_type='CLX'))
        except Exception:
            print("Nothing to write or bad key")
    return channel_array
|
||||
|
||||
def convert_int(plc_tag, value):
    """Translate a raw integer read from *plc_tag* into its display string.

    Unknown tags yield "Invalid Tag"; a known tag with an unmapped value
    yields "Invalid Code".
    """
    well_status_codes = {
        0: "Running", 1: "Pumped Off", 2: "Alarmed", 3: "Locked Out", 4: "Stopped"
    }

    pid_control_codes = {
        0: "Flow", 1: "Fluid Level", 2: "Tubing Pressure", 3: "Manual"
    }

    downhole_codes = {
        0: "OK", 1: "Connecting", 2: "Open Circuit", 3: "Shorted", 4: "Cannot Decode"
    }

    permissive_codes = {
        0: "OK", 1: "Flow", 2: "Intake Pressure", 3: "Intake Temperature",
        4: "Tubing Pressure", 5: "VFD", 6: "Fluid Level", 7: "Min. Downtime"
    }

    alarm_codes = {0: "OK", 1: "Alarm"}

    alarm_vfd_codes = {0: "OK", 1: "Locked Out"}

    vfd_fault_codes = {
        0: "No Fault", 2: "Auxiliary Input", 3: "Power Loss", 4: "UnderVoltage",
        5: "OverVoltage", 7: "Motor Overload", 8: "Heatsink OverTemp",
        9: "Thermister OverTemp", 10: "Dynamic Brake OverTemp",
        12: "Hardware OverCurrent", 13: "Ground Fault", 14: "Ground Warning",
        15: "Load Loss", 17: "Input Phase Loss", 18: "Motor PTC Trip",
        19: "Task Overrun", 20: "Torque Prove Speed Band", 21: "Output Phase Loss",
        24: "Decel Inhibit", 25: "OverSpeed Limit", 26: "Brake Slipped",
        27: "Torque Prove Conflict", 28: "TP Encls Confict", 29: "Analog In Loss",
        33: "Auto Restarts Exhausted", 35: "IPM OverCurrent", 36: "SW OverCurrent",
        38: "Phase U to Ground", 39: "Phase V to Ground", 40: "Phase W to Ground",
        41: "Phase UV Short", 42: "Phase VW Short", 43: "Phase WU Short",
        44: "Phase UNeg to Ground", 45: "Phase VNeg to Ground",
        46: "Phase WNeg to Ground", 48: "System Defaulted", 49: "Drive Powerup",
        51: "Clear Fault Queue", 55: "Control Board Overtemp", 59: "Invalid Code",
        61: "Shear Pin 1", 62: "Shear Pin 2", 64: "Drive Overload",
        66: "OW Torque Level", 67: "Pump Off", 71: "Port 1 Adapter",
        72: "Port 2 Adapter", 73: "Port 3 Adapter", 74: "Port 4 Adapter",
        75: "Port 5 Adapter", 76: "Port 6 Adapter", 77: "IR Volts Range",
        78: "FluxAmps Ref Range", 79: "Excessive Load", 80: "AutoTune Aborted",
        81: "Port 1 DPI Loss", 82: "Port 2 DPI Loss", 83: "Port 3 DPI Loss",
        84: "Port 4 DPI Loss", 85: "Port 5 DPI Loss", 86: "Port 6 DPI Loss",
        87: "IXo Voltage Range", 91: "Primary Velocity Feedback Loss",
        93: "Hardware Enable Check", 94: "Alternate Velocity Feedback Loss",
        95: "Auxiliary Velocity Feedback Loss", 96: "Position Feedback Loss",
        97: "Auto Tach Switch", 100: "Parameter Checksum",
        101: "Power Down NVS Blank", 102: "NVS Not Blank",
        103: "Power Down NVS Incompatible", 104: "Power Board Checksum",
        106: "Incompat MCB-PB", 107: "Replaced MCB-PB",
        108: "Analog Calibration Checksum", 110: "Invalid Power Board Data",
        111: "Power Board Invalid ID", 112: "Power Board App Min Version",
        113: "Tracking DataError", 115: "Power Down Table Full",
        116: "Power Down Entry Too Large", 117: "Power Down Data Checksum",
        118: "Power Board Power Down Checksum", 124: "App ID Changed",
        125: "Using Backup App", 134: "Start on Power Up",
        137: "External Precharge Error", 138: "Precharge Open",
        141: "Autotune Enc Angle", 142: "Autotune Speed Restricted",
        143: "Autotune Current Regulator", 144: "Autotune Inertia",
        145: "Autotune Travel", 13035: "Net IO Timeout", 13037: "Net IO Timeout"
    }

    # Dispatch: each supported tag maps to the codebook used to decode it.
    codebooks = {
        "Device_Status_INT": well_status_codes,
        "sts_PID_Control": pid_control_codes,
        "Downhole_Sensor_Status_INT": downhole_codes,
        "alarm_Flowmeter": alarm_codes,
        "alarm_IntakePressure": alarm_codes,
        "alarm_IntakeTemperature": alarm_codes,
        "alarm_TubingPressure": alarm_codes,
        "alarm_VFD": alarm_codes,
        "alarm_Lockout": alarm_vfd_codes,
        "alarm_FluidLevel": alarm_codes,
        "Run_Permissive_INT": permissive_codes,
        "Start_Permissive_INT": permissive_codes,
        "PowerFlex755.Val_LastFaultCode": vfd_fault_codes,
        "sts_CurrentVFDFaultCode": vfd_fault_codes
    }

    book = codebooks.get(plc_tag)
    if book is None:
        return "Invalid Tag"
    return book.get(value, "Invalid Code")
|
||||
|
||||
@@ -143,6 +143,11 @@ class deviceBase():
|
||||
topic = 'v1/devices/me/telemetry'
|
||||
print(topic, payload)
|
||||
self.q.put([topic, payload, 0])
|
||||
|
||||
def sendToTBAttributes(self, payload):
|
||||
topic = 'v1/devices/me/attributes'
|
||||
print(topic, payload)
|
||||
self.q.put([topic, payload, 0])
|
||||
|
||||
def sendtodbCH(self, ch, channel, value, timestamp):
|
||||
|
||||
|
||||
615
meshifyDrivers/piflow/Channel.py
Normal file
615
meshifyDrivers/piflow/Channel.py
Normal file
@@ -0,0 +1,615 @@
|
||||
"""Define Meshify channel class."""
|
||||
import time
|
||||
from pycomm.ab_comm.clx import Driver as ClxDriver
|
||||
from pycomm.cip.cip_base import CommError, DataError
|
||||
from file_logger import filelogger as log
|
||||
import minimalmodbus
|
||||
|
||||
minimalmodbus.BAUDRATE = 9600
|
||||
minimalmodbus.STOPBITS = 1
|
||||
|
||||
TAG_DATAERROR_SLEEPTIME = 5
|
||||
|
||||
def binarray(intval):
    """Split an integer into its bit characters, least-significant first."""
    msb_first = list('{0:08b}'.format(intval))
    return msb_first[::-1]
||||
|
||||
|
||||
def read_tag(addr, tag, plc_type="CLX"):
|
||||
"""Read a tag from the PLC."""
|
||||
direct = plc_type == "Micro800"
|
||||
clx = ClxDriver()
|
||||
try:
|
||||
if clx.open(addr, direct_connection=direct):
|
||||
try:
|
||||
val = clx.read_tag(tag)
|
||||
clx.close()
|
||||
return val
|
||||
except DataError as err:
|
||||
clx.close()
|
||||
time.sleep(TAG_DATAERROR_SLEEPTIME)
|
||||
log.error("Data Error during readTag({}, {}): {}".format(addr, tag, err))
|
||||
except CommError:
|
||||
# err = c.get_status()
|
||||
clx.close()
|
||||
log.error("Could not connect during readTag({}, {})".format(addr, tag))
|
||||
except AttributeError as err:
|
||||
clx.close()
|
||||
log.error("AttributeError during readTag({}, {}): \n{}".format(addr, tag, err))
|
||||
clx.close()
|
||||
return False
|
||||
|
||||
|
||||
def read_array(addr, tag, start, end, plc_type="CLX"):
|
||||
"""Read an array from the PLC."""
|
||||
direct = plc_type == "Micro800"
|
||||
clx = ClxDriver()
|
||||
if clx.open(addr, direct_connection=direct):
|
||||
arr_vals = []
|
||||
try:
|
||||
for i in range(start, end):
|
||||
tag_w_index = tag + "[{}]".format(i)
|
||||
val = clx.read_tag(tag_w_index)
|
||||
arr_vals.append(round(val[0], 4))
|
||||
if arr_vals:
|
||||
clx.close()
|
||||
return arr_vals
|
||||
else:
|
||||
log.error("No length for {}".format(addr))
|
||||
clx.close()
|
||||
return False
|
||||
except Exception:
|
||||
log.error("Error during readArray({}, {}, {}, {})".format(addr, tag, start, end))
|
||||
err = clx.get_status()
|
||||
clx.close()
|
||||
log.error(err)
|
||||
clx.close()
|
||||
|
||||
|
||||
def write_tag(addr, tag, val, plc_type="CLX"):
|
||||
"""Write a tag value to the PLC."""
|
||||
direct = plc_type == "Micro800"
|
||||
clx = ClxDriver()
|
||||
try:
|
||||
if clx.open(addr, direct_connection=direct):
|
||||
try:
|
||||
initial_val = clx.read_tag(tag)
|
||||
write_status = clx.write_tag(tag, val, initial_val[1])
|
||||
clx.close()
|
||||
return write_status
|
||||
except DataError as err:
|
||||
clx_err = clx.get_status()
|
||||
clx.close()
|
||||
log.error("--\nDataError during writeTag({}, {}, {}, plc_type={}) -- {}\n{}\n".format(addr, tag, val, plc_type, err, clx_err))
|
||||
|
||||
except CommError as err:
|
||||
clx_err = clx.get_status()
|
||||
log.error("--\nCommError during write_tag({}, {}, {}, plc_type={})\n{}\n--".format(addr, tag, val, plc_type, err))
|
||||
clx.close()
|
||||
return False
|
||||
|
||||
|
||||
class Channel(object):
    """Holds the configuration for a Meshify channel.

    Implements deadband-style reporting: check() decides whether a new
    reading differs enough (or is old enough) to be worth publishing.
    """

    def __init__(self, mesh_name, data_type, chg_threshold, guarantee_sec, map_=False, write_enabled=False):
        """Initialize the channel.

        mesh_name     -- meshify datapoint name
        data_type     -- e.g. 'BOOL', 'STRING', numeric types otherwise
        chg_threshold -- numeric deadband; a change larger than this reports
        guarantee_sec -- max seconds between reports even with no change
        map_          -- optional dict translating raw values to display values
        write_enabled -- whether remote sets are allowed for this channel
        """
        self.mesh_name = mesh_name
        self.data_type = data_type
        # Previous reported value (kept for reference after each send).
        self.last_value = None
        # Most recently reported value; None until the first send.
        self.value = None
        # Epoch seconds of the last report; 0 means "never sent".
        self.last_send_time = 0
        self.chg_threshold = chg_threshold
        self.guarantee_sec = guarantee_sec
        self.map_ = map_
        self.write_enabled = write_enabled

    def __str__(self):
        """Create a string for the channel."""
        return "{}\nvalue: {}, last_send_time: {}".format(self.mesh_name, self.value, self.last_send_time)

    def check(self, new_value, force_send=False):
        """Check to see if the new_value needs to be stored.

        Returns True (and updates value/last_send_time) when a send is due:
        first-ever reading, value change beyond the deadband, guarantee-time
        expiry, or force_send.
        NOTE(review): force_send is the LAST elif, so it only triggers when
        no other condition applied AND the guarantee window has not expired;
        confirm that ordering is intended.
        """
        send_needed = False
        send_reason = ""
        # Discrete channels compare by equality; numeric ones by threshold.
        if self.data_type == 'BOOL' or self.data_type == 'STRING' or type(new_value) == str:
            if self.last_send_time == 0:
                send_needed = True
                send_reason = "no send time"
            elif self.value is None:
                send_needed = True
                send_reason = "no value"
            elif self.value != new_value:
                if self.map_:
                    # Stored value is already mapped, so compare mapped forms.
                    if not self.value == self.map_[new_value]:
                        send_needed = True
                        send_reason = "value change"
                else:
                    send_needed = True
                    send_reason = "value change"
            elif (time.time() - self.last_send_time) > self.guarantee_sec:
                send_needed = True
                send_reason = "guarantee sec"
            elif force_send:
                send_needed = True
                send_reason = "forced"
        else:
            if self.last_send_time == 0:
                send_needed = True
                send_reason = "no send time"
            elif self.value is None:
                send_needed = True
                send_reason = "no value"
            elif abs(self.value - new_value) > self.chg_threshold:
                send_needed = True
                send_reason = "change threshold"
            elif (time.time() - self.last_send_time) > self.guarantee_sec:
                send_needed = True
                send_reason = "guarantee sec"
            elif force_send:
                send_needed = True
                send_reason = "forced"
        if send_needed:
            self.last_value = self.value
            if self.map_:
                try:
                    self.value = self.map_[new_value]
                except KeyError:
                    # Unmapped raw value: log it and fall back to the raw value.
                    log.error("Cannot find a map value for {} in {} for {}".format(new_value, self.map_, self.mesh_name))
                    self.value = new_value
            else:
                self.value = new_value
            self.last_send_time = time.time()
            log.info("Sending {} for {} - {}".format(self.value, self.mesh_name, send_reason))
        return send_needed

    def read(self):
        """Read the value.  Base class is a no-op; subclasses override."""
        pass
|
||||
|
||||
|
||||
def identity(sent):
    """No-op transform: hand back the argument unchanged."""
    result = sent
    return result
|
||||
|
||||
def volume_units(vunit):
    """Translate a flowmeter volume-rate unit code into its display string.

    NOTE(review): unmapped codes (e.g. 28-31, 84-87, 95) raise KeyError;
    confirm whether callers expect that or want a fallback.
    """
    units = {
        0: "cm cubed/s", 1: "cm cubed/min", 2: "cm cubed/h", 3: "cm cubed/d",
        4: "dm cubed/s", 5: "dm cubed/min", 6: "dm cubed/h", 7: "dm cubed/d",
        8: "m cubed/s", 9: "m cubed/min", 10: "m cubed/h", 11: "m cubed/d",
        12: "ml/s", 13: "ml/min", 14: "ml/h", 15: "ml/d",
        16: "l/s", 17: "l/min", 18: "l/h (+)", 19: "l/d",
        20: "hl/s", 21: "hl/min", 22: "hl/h", 23: "hl/d",
        24: "Ml/s", 25: "Ml/min", 26: "Ml/h", 27: "Ml/d",
        32: "af/s", 33: "af/min", 34: "af/h", 35: "af/d",
        36: "ft cubed/s", 37: "ft cubed/min", 38: "ft cubed/h", 39: "ft cubed/d",
        40: "fl oz/s (us)", 41: "fl oz/min (us)", 42: "fl oz/h (us)", 43: "fl oz/d (us)",
        44: "gal/s (us)", 45: "gal/min (us)", 46: "gal/h (us)", 47: "gal/d (us)",
        48: "Mgal/s (us)", 49: "Mgal/min (us)", 50: "Mgal/h (us)", 51: "Mgal/d (us)",
        52: "bbl/s (us;liq.)", 53: "bbl/min (us;liq.)", 54: "bbl/h (us;liq.)", 55: "bbl/d (us;liq.)",
        56: "bbl/s (us;beer)", 57: "bbl/min (us;beer)", 58: "bbl/h (us;beer)", 59: "bbl/d (us;beer)",
        60: "bbl/s (us;oil)", 61: "bbl/min (us;oil)", 62: "bbl/h (us;oil)", 63: "bbl/d (us;oil)",
        64: "bbl/s (us;tank)", 65: "bbl/min (us;tank)", 66: "bbl/h (us;tank)", 67: "bbl/d (us;tank)",
        68: "gal/s (imp)", 69: "gal/min (imp)", 70: "gal/h (imp)", 71: "gal/d (imp)",
        72: "Mgal/s (imp)", 73: "Mgal/min (imp)", 74: "Mgal/h (imp)", 75: "Mgal/d (imp)",
        76: "bbl/s (imp;beer)", 77: "bbl/min (imp;beer)", 78: "bbl/h (imp;beer)", 79: "bbl/d (imp;beer)",
        80: "bbl/s (imp;oil)", 81: "bbl/min (imp;oil)", 82: "bbl/h (imp;oil)", 83: "bbl/d (imp;oil)",
        88: "kgal/s (us)", 89: "kgal/min (us)", 90: "kgal/h (us)", 91: "kgal/d (us)",
        92: "MMft cubed/s", 93: "MMft cubed/min", 94: "MMft cubed/h",
        96: "Mft cubed/d"
    }
    return units[vunit]
|
||||
|
||||
def totalizer_units(tunit):
    """Translate a flowmeter totalizer unit code into its display string.

    NOTE(review): unmapped codes (e.g. 7, 21, 24-49) raise KeyError;
    confirm whether callers expect that or want a fallback.
    """
    units = {
        0: "cm cubed", 1: "dm cubed", 2: "m cubed", 3: "ml", 4: "l", 5: "hl",
        6: "Ml Mega", 8: "af", 9: "ft cubed", 10: "fl oz (us)", 11: "gal (us)",
        12: "Mgal (us)", 13: "bbl (us;liq.)", 14: "bbl (us;beer)",
        15: "bbl (us;oil)", 16: "bbl (us;tank)", 17: "gal (imp)",
        18: "Mgal (imp)", 19: "bbl (imp;beer)", 20: "bbl (imp;oil)",
        22: "kgal (us)", 23: "Mft cubed",
        50: "g", 51: "kg", 52: "t", 53: "oz", 54: "lb", 55: "STon",
        100: "Nl", 101: "Nm cubed", 102: "Sm cubed", 103: "Sft cubed",
        104: "Sl", 105: "Sgal (us)", 106: "Sbbl (us;liq.)", 107: "Sgal (imp)",
        108: "Sbbl (us;oil)", 109: "MMSft cubed", 110: "Nhl",
        251: "None"
    }
    return units[tunit]
|
||||
|
||||
def int_to_bits(n, x):
    """Big-endian bit list of *n*, left-padded with zeros to length *x*."""
    bits = [int(ch) for ch in bin(n)[2:]]  # [2:] strips the "0b" prefix
    return pad_to_x(bits, x)


def pad_to_x(n, x):
    """Left-pad list *n* with integer zeros until it is at least *x* long."""
    while len(n) < x:
        n = [0] + n
    return n
|
||||
|
||||
def status_codes(n):
    """Decode a 16-bit drive status word into a human-readable sentence.

    The word is split into nibbles: low (run direction), mid (motion state)
    and high (health).  Each nibble is rebuilt as an integer and looked up.
    NOTE(review): nibble values outside the tables (e.g. several bits set at
    once in the low nibble) raise KeyError -- confirm the drive guarantees
    one-hot/enumerated nibbles.
    """
    status_array = int_to_bits(n,16)
    status_low = {
        0: "Stopped;",
        1: "Operating in Forward;",
        2: "Operating in Reverse;",
        3: "DC operating;"
    }
    status_mid = {
        0: "",
        1: "Speed searching;",
        2: "Accelerating;",
        3: "At constant speed;",
        4: "Decelerating;",
        5: "Decelerating to stop;",
        6: "H/W OCS;",
        7: "S/W OCS;",
        8: "Dwell operating;"
    }
    status_high = {
        0: "Normal state",
        4: "Warning occurred",
        8: "Fault occurred"
    }
    # Weight of bit position x within a 4-bit slice (MSB first).
    values = {
        0: 8,
        1: 4,
        2: 2,
        3: 1
    }

    # status_array is MSB first: [:4] is the top nibble, [12:] the bottom.
    stats_low = status_array[12:]
    stats_mid = status_array[8:12]
    stats_high = status_array[:4]
    low = 0
    mid = 0
    high = 0
    for x in range(4):
        if stats_low[x] == 1:
            low = low + values[x]
        if stats_mid[x] == 1:
            mid = mid + values[x]
        if stats_high[x] == 1:
            high = high + values[x]

    return status_low[low] + " " + status_mid[mid] + ' ' + status_high[high]
|
||||
|
||||
def fault_code_a(n):
    """Decode VFD fault word A into a space-joined list of fault names.

    Bit x of the 16-bit word (MSB first) maps to fault[15 - x]; prepending
    keeps lower-numbered faults first in the output.
    NOTE(review): indentation reconstructed -- `counter` is assumed to step
    once per loop iteration (not only when a bit is set); confirm against
    the original source.
    """
    fault_code_array = int_to_bits(n,16)

    """ fault = {
        0: "OCT",
        1: "OVT",
        2: "EXT-A",
        3: "EST",
        4: "COL",
        5: "GFT",
        6: "OHT",
        7: "ETH",
        8: "OLT",
        9: "Reserved",
        10: "EXT-B",
        11: "EEP",
        12: "FAN",
        13: "POT",
        14: "IOLT",
        15: "LVT"
    } """
    # Active decode table.  NOTE(review): "Inmput Imaging Trip" looks like a
    # typo for "Input Imaging Trip" -- confirm before changing the string.
    fault = {
        0: "Overload Trip",
        1: "Underload Trip",
        2: "Inverter Overload Trip",
        3: "E-Thermal Trip",
        4: "Ground Fault Trip",
        5: "Output Image Trip",
        6: "Inmput Imaging Trip",
        7: "Reserved",
        8: "Reserved",
        9: "NTC Trip",
        10: "Overcurrent Trip",
        11: "Overvoltage Trip",
        12: "External Trip",
        13: "Arm Short",
        14: "Over Heat Trip",
        15: "Fuse Open Trip"
    }

    faults = []
    counter = 15
    for x in range(16):
        if fault_code_array[x] == 1:
            faults = [fault[counter]] + faults
        counter = counter - 1
    return ' '.join(faults)
|
||||
|
||||
def fault_code_b(n):
    """Decode VFD fault word B into a space-joined list of fault names.

    Bit x of the 8-bit word (MSB first) maps to fault[7 - x].
    NOTE(review): the table holds 16 entries but only 8 bits are examined,
    so entries 8-15 are unreachable here -- confirm whether the word should
    be 16 bits wide.  Indentation of the `counter` decrement reconstructed
    as once-per-iteration; confirm against the original source.
    """
    fault_code_array = int_to_bits(n,8)

    """ fault = {
        0: "COM",
        1: "Reserved",
        2: "NTC",
        3: "REEP",
        4: "OC2",
        5: "NBR",
        6: "SAFA",
        7: "SAFB"
    } """
    fault = {
        0: "Reserved",
        1: "Reserved",
        2: "Reserved",
        3: "FAN Trip",
        4: "Reserved",
        5: "Reserved",
        6: "Pre PID Fail",
        7: "Bad contact at basic I/O board",
        8: "External Brake Trip",
        9: "No Motor Trip",
        10: "Bad Option Card",
        11: "Reserved",
        12: "Reserved",
        13: "Reserved",
        14: "Pre Over Heat Trip",
        15: "Reserved"
    }

    faults = []
    counter = 7
    for x in range(8):
        if fault_code_array[x] == 1:
            faults = [fault[counter]] + faults
        counter = counter - 1
    return ' '.join(faults)
|
||||
|
||||
class ModbusChannel(Channel):
    """Modbus RTU channel read over /dev/ttyS0 via minimalmodbus.

    Wraps a single holding/input register (or a float register pair) on a
    serial Modbus slave. ``read()`` returns the transformed current value,
    or None on a communication error; ``write()`` pushes a value back to
    the register and returns True/False for success.
    """

    def __init__(self, mesh_name, register_number, data_type, chg_threshold, guarantee_sec, channel_size=1, map_=False, write_enabled=False, transform_fn=identity, unit_number=1, scaling=0):
        """Initialize the channel.

        Args:
            mesh_name: Meshify channel name.
            register_number: Modbus register address to read/write.
            data_type: "FLOAT", "INTEGER" or "STRING".
            chg_threshold: change threshold used by the base Channel check.
            guarantee_sec: max seconds between guaranteed sends.
            channel_size: number of registers backing the value
                (presumably 2 for FLOAT channels — TODO confirm at call sites).
            map_: optional value map passed to the base Channel.
            write_enabled: whether writes are permitted for this channel.
            transform_fn: applied to every raw read before returning.
            unit_number: Modbus slave address.
            scaling: number of decimals passed through to minimalmodbus.
        """
        super(ModbusChannel, self).__init__(mesh_name, data_type, chg_threshold, guarantee_sec, map_, write_enabled)
        self.mesh_name = mesh_name
        self.register_number = register_number
        self.channel_size = channel_size
        self.data_type = data_type
        self.last_value = None
        self.value = None
        self.last_send_time = 0
        self.chg_threshold = chg_threshold
        self.guarantee_sec = guarantee_sec
        self.map_ = map_
        self.write_enabled = write_enabled
        self.transform_fn = transform_fn
        self.unit_number = unit_number
        # NOTE(review): every channel creates its own Instrument on the same
        # serial device; minimalmodbus shares the underlying port object
        # between instruments — confirm this is intended.
        self.instrument = minimalmodbus.Instrument('/dev/ttyS0', self.unit_number)
        self.scaling = scaling

    def read(self):
        """Return the transformed read value, or None on a comm error."""
        if self.data_type == "FLOAT":
            try:
                # functioncode 4 (read input registers), channel_size registers.
                read_value = self.instrument.read_float(self.register_number, 4, self.channel_size)
            except IOError as e:
                log.info(e)
                return None
        elif self.data_type == "INTEGER" or self.data_type == "STRING":
            try:
                # scaling = number of decimals minimalmodbus divides by.
                read_value = self.instrument.read_register(self.register_number, self.scaling, 4)
            except IOError as e:
                log.info(e)
                return None
        else:
            # BUG FIX: an unsupported data_type previously fell through to
            # transform_fn with read_value unbound, raising NameError.
            log.info("Unsupported data_type {} for {}".format(self.data_type, self.mesh_name))
            return None
        read_value = self.transform_fn(read_value)
        return read_value

    def write(self, value):
        """Write a value to the register; return True on success."""
        if self.data_type == "FLOAT":
            value = float(value)
        elif self.data_type == "INTEGER":
            value = int(value)
        else:
            value = str(value)
        try:
            # functioncode 16 (write multiple registers) for multi-register
            # channels, otherwise 6 (write single register).
            self.instrument.write_register(self.register_number, value, self.scaling, 16 if self.channel_size > 1 else 6)
            return True
        except Exception as e:
            log.info("Failed to write value: {}".format(e))
            return False
|
||||
|
||||
|
||||
class PLCChannel(Channel):
    """Channel backed by a single tag on an Allen-Bradley PLC.

    Reads go through the module-level read_tag() helper; a failed read
    yields None so callers can fall back to the previous value.
    """

    def __init__(self, ip, mesh_name, plc_tag, data_type, chg_threshold, guarantee_sec, map_=False, write_enabled=False, plc_type='CLX'):
        """Initialize the channel with its PLC address and tag name."""
        super(PLCChannel, self).__init__(mesh_name, data_type, chg_threshold, guarantee_sec, map_, write_enabled)
        # Where and what to read.
        self.plc_ip = ip
        self.plc_tag = plc_tag
        self.plc_type = plc_type
        # Channel identity and typing.
        self.mesh_name = mesh_name
        self.data_type = data_type
        self.map_ = map_
        self.write_enabled = write_enabled
        # Send bookkeeping used by the base Channel's check logic.
        self.value = None
        self.last_value = None
        self.last_send_time = 0
        self.chg_threshold = chg_threshold
        self.guarantee_sec = guarantee_sec

    def read(self):
        """Return the tag's current value, or None if it cannot be read."""
        if not (self.plc_tag and self.plc_ip):
            return None
        result = read_tag(self.plc_ip, self.plc_tag, plc_type=self.plc_type)
        if not result:
            # read_tag returns False on comm/data errors.
            return None
        return result[0]
|
||||
|
||||
|
||||
class BoolArrayChannels(Channel):
    """Hold the configuration for a set of boolean array channels.

    One PLC tag holds a packed integer; binarray() splits it into bits and
    map_ (bit index -> channel name) fans the bits out into a dict value.
    The whole dict is sent as one unit when any bit changes.
    """

    def __init__(self, ip, mesh_name, plc_tag, data_type, chg_threshold, guarantee_sec, map_=False, write_enabled=False):
        """Initialize the channel."""
        super(BoolArrayChannels, self).__init__(mesh_name, data_type, chg_threshold, guarantee_sec, map_, write_enabled)
        self.plc_ip = ip
        self.mesh_name = mesh_name
        self.plc_tag = plc_tag
        self.data_type = data_type
        # last_value / value hold dicts of name -> bit once a read succeeds.
        self.last_value = None
        self.value = None
        self.last_send_time = 0
        self.chg_threshold = chg_threshold
        self.guarantee_sec = guarantee_sec
        self.map_ = map_
        self.write_enabled = write_enabled

    def compare_values(self, new_val_dict):
        """Compare new values to old values to see if the values need storing.

        Returns True when any key differs from the previously sent dict, or
        when a key is missing from it (treated as a change).

        NOTE(review): this assumes self.last_value is already a dict; read()
        only calls it after a first send has populated last_value, so the
        None case cannot be reached through read() — confirm if calling
        this directly from elsewhere.
        """
        send = False
        for idx in new_val_dict:
            try:
                if new_val_dict[idx] != self.last_value[idx]:
                    send = True
            except KeyError:
                # Unknown key in the previous snapshot: force a send.
                log.error("Key Error in self.compare_values for index {}".format(idx))
                send = True
        return send

    def read(self, force_send=False):
        """Read the value and check to see if needs to be stored.

        Returns True when a new snapshot was taken and should be sent
        (the snapshot is left in self.value); False otherwise, including
        when the PLC read fails.
        """
        send_needed = False
        send_reason = ""
        if self.plc_tag:
            val = read_tag(self.plc_ip, self.plc_tag)
            # read_tag returns False on failure; a tuple/list on success.
            if val:
                # Split the packed word into individual bits (LSB first).
                bool_arr = binarray(val[0])
                new_val = {}
                for idx in self.map_:
                    try:
                        new_val[self.map_[idx]] = bool_arr[idx]
                    except KeyError:
                        log.error("Not able to get value for index {}".format(idx))

                # Decide whether this snapshot must be sent; first match wins.
                if self.last_send_time == 0:
                    send_needed = True
                    send_reason = "no send time"
                elif self.value is None:
                    send_needed = True
                    send_reason = "no value"
                elif self.compare_values(new_val):
                    send_needed = True
                    send_reason = "value change"
                elif (time.time() - self.last_send_time) > self.guarantee_sec:
                    # Periodic heartbeat even without changes.
                    send_needed = True
                    send_reason = "guarantee sec"
                elif force_send:
                    send_needed = True
                    send_reason = "forced"

                if send_needed:
                    # Record the snapshot as both current and last-sent value.
                    self.value = new_val
                    self.last_value = self.value
                    self.last_send_time = time.time()
                    log.info("Sending {} for {} - {}".format(self.value, self.mesh_name, send_reason))
        return send_needed
|
||||
563
meshifyDrivers/piflow/PiFlow.py
Normal file
563
meshifyDrivers/piflow/PiFlow.py
Normal file
@@ -0,0 +1,563 @@
|
||||
"""Driver for PiFlow"""
|
||||
import os
|
||||
import threading
|
||||
import json
|
||||
import time
|
||||
from random import randint
|
||||
from datetime import datetime as dt
|
||||
from device_base import deviceBase
|
||||
import persistence
|
||||
from utilities import get_public_ip_address, get_private_ip_address
|
||||
from file_logger import filelogger as log
|
||||
"""import RPi.GPIO as GPIO
|
||||
|
||||
Relay_Ch1 = 26
|
||||
Relay_Ch2 = 20
|
||||
Relay_Ch3 = 21
|
||||
|
||||
GPIO.setwarnings(False)
|
||||
GPIO.setmode(GPIO.BCM)
|
||||
|
||||
GPIO.setup(Relay_Ch1,GPIO.OUT)
|
||||
GPIO.output(Relay_Ch1, GPIO.HIGH)
|
||||
GPIO.setup(Relay_Ch2,GPIO.OUT)
|
||||
GPIO.output(Relay_Ch2, GPIO.HIGH)
|
||||
GPIO.setup(Relay_Ch3,GPIO.OUT)
|
||||
GPIO.output(Relay_Ch3, GPIO.HIGH)
|
||||
"""
|
||||
_ = None
|
||||
os.system('sudo timedatectl set-timezone America/Chicago')
|
||||
log.info("PiFlow startup")
|
||||
|
||||
# GLOBAL VARIABLES
|
||||
WAIT_FOR_CONNECTION_SECONDS = 5
|
||||
IP_CHECK_PERIOD = 60
|
||||
|
||||
|
||||
# PERSISTENCE FILE
|
||||
# Load the persisted configuration; seed the defaults on first boot.
PERSIST = persistence.load('persist.json')
if not PERSIST:
    PERSIST = {
        'flowmeter': 247,
        'drive': 1,
        'isVFD': False,
        'drive_enabled': True,
        'state': False,
        'state_timer': 0,
        'plc_ip': '192.168.1.12',
    }
    # Per-totalizer daily-accounting defaults.
    for _tot in ('totalizer_1', 'totalizer_2', 'totalizer_3'):
        PERSIST['yesterday_' + _tot] = dt.today().day
        PERSIST['yesterday_total_' + _tot] = 0
        PERSIST['yesterday_total_midnight_' + _tot] = 0
    persistence.store(PERSIST, 'persist.json')
|
||||
"""
|
||||
try:
|
||||
if time.time() - PERSIST['state_timer'] >= 60:
|
||||
GPIO.output(Relay_Ch1, GPIO.HIGH)
|
||||
PERSIST['state'] = False
|
||||
persistence.store(PERSIST, "persist.json")
|
||||
elif PERSIST['state']:
|
||||
GPIO.output(Relay_Ch1, GPIO.LOW)
|
||||
else:
|
||||
GPIO.output(Relay_Ch1, GPIO.HIGH)
|
||||
except:
|
||||
PERSIST['state'] = False
|
||||
PERSIST['state_time'] = time.time()
|
||||
persistence.store(PERSIST, "persist.json")
|
||||
"""
|
||||
# Pull driver configuration out of PERSIST, back-filling keys that
# predate this version of the persist file.
drive_enabled = PERSIST['drive_enabled']

try:
    isVFD = PERSIST['isVFD']
except KeyError:  # narrowed from a bare except: only a missing key is expected
    PERSIST['isVFD'] = False
    isVFD = PERSIST['isVFD']
    # BUG FIX: persistence.store() was called without the filename here,
    # unlike every other call site in this driver ('persist.json').
    persistence.store(PERSIST, 'persist.json')

try:
    plc_ip = PERSIST['plc_ip']
except KeyError:
    PERSIST['plc_ip'] = '192.168.1.12'
    plc_ip = PERSIST['plc_ip']
    persistence.store(PERSIST, 'persist.json')
|
||||
|
||||
from Tags import tags
|
||||
|
||||
CHANNELS = tags
|
||||
from runtimeStats import RuntimeStats as RTS
|
||||
|
||||
class start(threading.Thread, deviceBase):
    """Meshify driver entry point for the PiFlow device.

    Runs as a daemon thread: polls the configured CHANNELS (PLC channels
    when the persisted `isVFD` flag is set, Modbus channels otherwise),
    forwards changed values to Meshify via sendtodbDev, keeps daily
    totalizer statistics, and exposes PiFlow_* handlers invoked remotely
    by the Meshify platform.
    """

    def __init__(self, name=None, number=None, mac=None, Q=None, mcu=None,
                 companyId=None, offset=None, mqtt=None, Nodes=None):
        """Initialize the driver thread and runtime statistics, then start."""
        threading.Thread.__init__(self)
        deviceBase.__init__(self, name=name, number=number, mac=mac, Q=Q,
                            mcu=mcu, companyId=companyId, offset=offset,
                            mqtt=mqtt, Nodes=Nodes)

        self.daemon = True
        self.version = "28"
        self.finished = threading.Event()
        self.force_send = False                 # set by PiFlow_sync to re-send everything
        self.public_ip_address = ""
        self.private_ip_address = ""
        self.public_ip_address_last_checked = 0
        self.status = ""                        # last status string sent (VFD mode)
        self.alarm = ""                         # last alarm string sent (VFD mode)
        self.rts = RTS()                        # runtime / frequency statistics
        self.rts.loadDataFromFile()
        self.rts.saveDataToFile()

        threading.Thread.start(self)

    # this is a required function for all drivers, its goal is to upload some piece of data
    # about your device so it can be seen on the web
    def register(self):
        """Register the driver (no-op for PiFlow)."""
        # self.sendtodb("log", "BOOM! Booted.", 0)
        pass

    def run(self):
        """Actually run the driver: the main polling/reporting loop."""
        for i in range(0, WAIT_FOR_CONNECTION_SECONDS):
            print("PiFlow driver will start in {} seconds".format(WAIT_FOR_CONNECTION_SECONDS - i))
            time.sleep(1)
        log.info("BOOM! Starting PiFlow driver...")

        #self._check_watchdog()
        self._check_ip_address()

        self.nodes["PiFlow_0199"] = self

        send_loops = 0

        while True:
            now = time.time()
            if self.force_send:
                log.warning("FORCE SEND: TRUE")
            if isVFD:
                # PLC/VFD mode: the first 24 channels are status/alarm bits
                # that are aggregated into status and alarm strings.
                status = {}
                for chan in CHANNELS[:24]:  # build status/alarm strings
                    try:
                        val = chan.read()
                        chan.check(val, self.force_send)
                        status[chan.mesh_name] = chan.value
                    except Exception as e:
                        log.warning("An error occured in status check: {}".format(e))
                try:
                    self.sendStatus(status)
                except Exception as e:
                    log.warning("An error occured in send status: {}".format(e))
                # Remaining channels are plain data points.
                for chan in CHANNELS[24:]:
                    try:
                        val = chan.read()
                        if chan.mesh_name in ['totalizer_1', 'totalizer_2', 'totalizer_3']:
                            right_now = dt.today()
                            today_total, yesterday_total = self.totalize(val, PERSIST['yesterday_'+chan.mesh_name], right_now.day, right_now.hour, right_now.minute, PERSIST['yesterday_total_midnight_'+chan.mesh_name], PERSIST['yesterday_total_'+chan.mesh_name], chan.mesh_name)
                            if chan.check(val, self.force_send):
                                self.sendtodbDev(1, chan.mesh_name, chan.value, 0, 'PiFlow')
                                self.sendtodbDev(1, "today_"+chan.mesh_name, today_total, 0, 'PiFlow')
                                self.sendtodbDev(1, "yesterday_"+chan.mesh_name, yesterday_total, 0, 'PiFlow')
                                self.sendtodbDev(1, chan.mesh_name + "_units", "BBL", 0, 'PiFlow')
                        elif chan.mesh_name == "frequency":
                            # Track hertz history for the 30-day average.
                            if val > 0:
                                self.rts.addHertzDataPoint(val)
                                self.rts.saveDataToFile()
                            if chan.check(val, self.force_send):
                                self.sendtodbDev(1, chan.mesh_name, chan.value, 0, 'PiFlow')
                                self.sendtodbDev(1, "avgFrequency30Days", self.rts.calculateAverageHertzMultiDay(), 0, 'PiFlow')
                        else:
                            if chan.check(val, self.force_send):
                                self.sendtodbDev(1, chan.mesh_name, chan.value, 0, 'PiFlow')
                    except Exception as e:
                        log.warning("An error occured in data collection: {}".format(e))
            else:
                # Modbus mode: poll every channel over the serial bus.
                for chan in CHANNELS:
                    try:
                        if chan.mesh_name == "remote_start":
                            # Virtual channel backed by the persisted relay state.
                            val = PERSIST["state"]
                        else:
                            # Retry the serial read a few times; fall back to
                            # the previous value when the bus stays silent.
                            val = None
                            for _ in range(3):
                                temp = chan.read()
                                if temp is not None:
                                    val = temp
                            if val is None:
                                log.info("No modbus data sending previous value")
                                val = chan.value
                        if chan.mesh_name in ['totalizer_1', 'totalizer_2', 'totalizer_3']:
                            right_now = dt.today()
                            today_total, yesterday_total = self.totalize(val, PERSIST['yesterday_'+chan.mesh_name], right_now.day, right_now.hour, right_now.minute, PERSIST['yesterday_total_midnight_'+chan.mesh_name], PERSIST['yesterday_total_'+chan.mesh_name], chan.mesh_name)
                            if chan.check(val, self.force_send):
                                self.sendtodbDev(1, chan.mesh_name, chan.value, 0, 'PiFlow')
                                self.sendtodbDev(1, "today_"+chan.mesh_name, today_total, 0, 'PiFlow')
                                self.sendtodbDev(1, "yesterday_"+chan.mesh_name, yesterday_total, 0, 'PiFlow')
                        elif chan.mesh_name == "volume_flow" and not PERSIST['drive_enabled']:
                            # Without a drive, infer run status from flow.
                            if chan.check(val, self.force_send):
                                self.sendtodbDev(1, chan.mesh_name, chan.value, 0, 'PiFlow')
                                if chan.value > 0:
                                    self.sendtodbDev(1, "run_status", "Running", 0, 'PiFlow')
                                    if not self.rts.runs[self.rts.todayString]["run_" + str(self.rts.currentRun)]["start"]:
                                        self.rts.startRun()
                                        self.rts.saveDataToFile()
                                else:
                                    self.sendtodbDev(1, "run_status", "Stopped", 0, 'PiFlow')
                                    if self.rts.runs[self.rts.todayString]["run_" + str(self.rts.currentRun)]["start"] and not self.rts.runs[self.rts.todayString]["run_" + str(self.rts.currentRun)]["end"]:
                                        self.rts.endRun()
                                        self.rts.saveDataToFile()

                                self.sendtodbDev(1, "percentRunTime30Days", self.rts.calculateRunPercentMultiDay(), 0, 'PiFlow')
                        elif chan.mesh_name == "run_status":
                            # Keep the run bookkeeping in sync with the drive state.
                            if "Operating" in val and not self.rts.runs[self.rts.todayString]["run_" + str(self.rts.currentRun)]["start"]:
                                self.rts.startRun()
                                self.rts.saveDataToFile()
                            elif "Stopped" in val and self.rts.runs[self.rts.todayString]["run_" + str(self.rts.currentRun)]["start"] and not self.rts.runs[self.rts.todayString]["run_" + str(self.rts.currentRun)]["end"]:
                                self.rts.endRun()
                                self.rts.saveDataToFile()
                            if chan.check(val, self.force_send):
                                self.sendtodbDev(1, chan.mesh_name, chan.value, 0, 'PiFlow')
                                self.sendtodbDev(1, "percentRunTime30Days", self.rts.calculateRunPercentMultiDay(), 0, 'PiFlow')
                        elif chan.mesh_name == "frequency":
                            if val > 0:
                                self.rts.addHertzDataPoint(val)
                                self.rts.saveDataToFile()
                            if chan.check(val, self.force_send):
                                self.sendtodbDev(1, chan.mesh_name, chan.value, 0, 'PiFlow')
                                self.sendtodbDev(1, "avgFrequency30Days", self.rts.calculateAverageHertzMultiDay(), 0, 'PiFlow')
                        elif chan.mesh_name == "remote_start":
                            if chan.check(val, self.force_send):
                                self.sendtodbDev(1, chan.mesh_name, chan.value, 0, 'PiFlow')
                                PERSIST["state_timer"] = time.time()
                                persistence.store(PERSIST, "persist.json")
                        else:
                            if chan.check(val, self.force_send):
                                self.sendtodbDev(1, chan.mesh_name, chan.value, 0, 'PiFlow')

                    except Exception as e:
                        log.warning("An error occured: {}".format(e))
                        time.sleep(3)

            # print("PiFlow driver still alive...")
            # force_send stays on for a few loops so every channel re-sends.
            if self.force_send:
                if send_loops > 2:
                    log.warning("Turning off force_send")
                    self.force_send = False
                    send_loops = 0
                else:
                    send_loops += 1

            if (now - self.public_ip_address_last_checked) > IP_CHECK_PERIOD:
                self._check_ip_address()
            time.sleep(10)

    def _check_ip_address(self):
        """Check the public IP address and send to Meshify if changed."""
        self.public_ip_address_last_checked = time.time()
        test_public_ip = get_public_ip_address()
        # NOTE(review): trims the last character — presumably a trailing
        # newline from the helper's output; confirm against utilities.
        test_public_ip = test_public_ip[:-1]
        test_private_ip = get_private_ip_address()
        if not test_public_ip == self.public_ip_address and not test_public_ip == "0.0.0.0":
            self.sendtodbDev(1, 'public_ip_address', test_public_ip, 0, 'PiFlow')
            self.public_ip_address = test_public_ip
        if not test_private_ip == self.private_ip_address:
            self.sendtodbDev(1, 'private_ip_address', test_private_ip, 0, 'PiFlow')
            self.private_ip_address = test_private_ip

    def PiFlow_sync(self, name, value):
        """Sync all data from the driver (re-send every channel)."""
        self.force_send = True
        # self.sendtodb("log", "synced", 0)
        return True

    def PiFlow_flowmeternumber(self, name, unit_number):
        """Change the Modbus unit number for the PiFlow flow meter."""
        unit_number = int(unit_number)
        if drive_enabled:
            # With a drive present, only the first 8 channels are the meter.
            # NOTE(review): this branch does not echo 'flowmeternumber' back
            # to the platform, unlike the branch below — confirm intended.
            for chan in CHANNELS[0:8]:
                chan.unit_number = unit_number
            PERSIST['flowmeter'] = unit_number
            persistence.store(PERSIST, 'persist.json')
            return True
        else:
            for chan in CHANNELS:
                chan.unit_number = unit_number
            PERSIST['flowmeter'] = unit_number
            persistence.store(PERSIST, 'persist.json')
            self.sendtodbDev(1, 'flowmeternumber', unit_number, 0, 'PiFlow')
            return True

    def PiFlow_drivenumber(self, name, unit_number):
        """Change the Modbus unit number for the PiFlow drive."""
        unit_number = int(unit_number)
        for chan in CHANNELS[8:]:
            chan.unit_number = unit_number

        PERSIST['drive'] = unit_number
        persistence.store(PERSIST, 'persist.json')
        self.sendtodbDev(1, 'drivenumber', unit_number, 0, 'PiFlow')
        return True

    def PiFlow_reboot(self, name, value):
        """Reboot the Pi on remote request."""
        os.system('reboot')
        return True

    def PiFlow_drive_enabled(self, name, value):
        """Persist whether a drive is attached (1 = enabled)."""
        value = int(value)
        if value == 1:
            PERSIST['drive_enabled'] = True
        else:
            PERSIST['drive_enabled'] = False

        persistence.store(PERSIST, 'persist.json')
        self.sendtodbDev(1, 'drive_enabled', value, 0, 'PiFlow')
        return True

    def PiFlow_write(self, name, value):
        """Write a value to the device via modbus.

        `value` is a JSON-ish string: {'addr': ..., 'reg': ..., 'val': ...}.
        Returns the write result, or False when no channel matches.
        """
        new_val = json.loads(str(value).replace("'", '"'))
        addr_n = int(new_val['addr'])
        reg_n = int(new_val['reg'])
        val_n = new_val['val']
        # BUG FIX: write_res was unbound (NameError) when no channel matched
        # the requested address/register; default to False.
        write_res = False
        for chan in CHANNELS:
            if chan.unit_number == addr_n and chan.register_number == reg_n:
                write_res = chan.write(val_n)

        log.info("Result of PiFlow_write(self, {}, {}) = {}".format(name, value, write_res))
        return write_res
    """
    def PiFlow_start(self, name, value):
        if isVFD:
            #do something with the plc
            log.info("Sending START signal to PLC")
        else:
            log.info("Sending START signal to Drive via relay {}".format(Relay_Ch1))
            GPIO.output(Relay_Ch1,GPIO.LOW)
            PERSIST["state"] = True
            PERSIST["state_timer"] = time.time()
            persistence.store(PERSIST,"persist.json")

        return True

    def PiFlow_stop(self, name, value):
        if isVFD:
            log.info("Sending STOP signal to PLC")
            #do something with the plc
        else:
            log.info("Sending STOP signal to Drive")
            GPIO.output(Relay_Ch1,GPIO.HIGH)
            PERSIST["state"] = False
            PERSIST["state_timer"] = time.time()
            persistence.store(PERSIST, "persist.json")
        return True
    """
    def totalize(self, val, yesterday, day, hour, minute, yesterday_total_midnight, yesterday_total, channel):
        """Maintain the daily totals for one totalizer channel.

        Returns (today_total, yesterday_total). The midnight baseline and
        yesterday's total are persisted so totals survive a restart.
        """
        # First run (or cleared persist): baseline the midnight reading.
        if (yesterday_total == 0 and yesterday_total_midnight == 0) or (yesterday_total is None or yesterday_total_midnight is None):
            yesterday_total_midnight = val
            PERSIST['yesterday_total_midnight_'+channel] = yesterday_total_midnight
            persistence.store(PERSIST, 'persist.json')
        today_total = val - yesterday_total_midnight
        # Midnight rollover: today's running total becomes yesterday's total.
        if hour == 0 and minute == 0 and not(day == yesterday):
            self.rts.manageTime()
            yesterday_total = today_total
            yesterday_total_midnight = val
            today_total = val - yesterday_total_midnight
            yesterday = day
            PERSIST['yesterday_'+channel] = yesterday
            PERSIST['yesterday_total_'+channel] = yesterday_total
            PERSIST['yesterday_total_midnight_'+channel] = yesterday_total_midnight
            persistence.store(PERSIST, 'persist.json')

        return today_total, yesterday_total

    def sendStatus(self, status):
        """Aggregate VFD status/alarm bits into strings and send on change.

        `status` maps channel mesh_names (vfd_active, vfd_faultcode, ...)
        to their mapped string values as produced by the status channels.
        """
        status_string = ""

        # PowerFlex-style drive fault code -> human-readable name.
        fault_codes = {
            0: "",
            2: "Auxiliary Input",
            3: "Power Loss",
            4: "UnderVoltage",
            5: "OverVoltage",
            7: "Motor Overload",
            8: "Heatsink OvrTemp",
            9: "Thermister OvrTemp",
            10: "DynBrake OverTemp",
            12: "HW OverCurrent",
            13: "Ground Fault",
            14: "Ground Warning",
            15: "Load Loss",
            17: "Input Phase Loss",
            18: "Motor PTC Trip",
            19: "Task Overrun",
            20: "TorqPrv Spd Band",
            21: "Output PhaseLoss",
            24: "Decel Inhibit",
            25: "OverSpeed Limit",
            26: "Brake Slipped",
            27: "Torq Prove Cflct",
            28: "TP Encls Config",
            29: "Analog In Loss",
            33: "AuRsts Exhausted",
            35: "IPM OverCurrent",
            36: "SW OverCurrent",
            38: "Phase U to Grnd",
            39: "Phase V to Grnd",
            40: "Phase W to Grnd",
            41: "Phase UV Short",
            42: "Phase VW Short",
            43: "Phase WU Short",
            44: "Phase UNegToGrnd",
            45: "Phase VNegToGrnd",
            46: "Phase WNegToGrnd",
            48: "System Defaulted",
            49: "Drive Powerup",
            51: "Clr Fault Queue",
            55: "Ctrl Bd Overtemp",
            59: "Invalid Code",
            61: "Shear Pin 1",
            62: "Shear Pin 2",
            64: "Drive Overload",
            67: "Pump Off",
            71: "Port 1 Adapter",
            72: "Port 2 Adapter",
            73: "Port 3 Adapter",
            74: "Port 4 Adapter",
            75: "Port 5 Adapter",
            76: "Port 6 Adapter",
            77: "IR Volts Range",
            78: "FluxAmpsRef Rang",
            79: "Excessive Load",
            80: "AutoTune Aborted",
            81: "Port 1 DPI Loss",
            82: "Port 2 DPI Loss",
            83: "Port 3 DPI Loss",
            84: "Port 4 DPI Loss",
            85: "Port 5 DPI Loss",
            86: "Port 6 DPI Loss",
            87: "Ixo VoltageRange",
            91: "Pri VelFdbk Loss",
            93: "Hw Enable Check",
            94: "Alt VelFdbk Loss",
            95: "Aux VelFdbk Loss",
            96: "PositionFdbkLoss",
            97: "Auto Tach Switch",
            100: "Parameter Chksum",
            101: "PwrDn NVS Blank",
            102: "NVS Not Blank",
            103: "PwrDn Nvs Incomp",
            104: "Pwr Brd Checksum",
            106: "Incompat MCB-PB",
            107: "Replaced MCB-PB",
            108: "Anlg Cal Chksum",
            110: "Ivld Pwr Bd Data",
            111: "PwrBd Invalid ID",
            112: "PwrBd App MinVer",
            113: "Tracking DataErr",
            115: "PwrDn Table Full",
            116: "PwrDnEntry2Large",
            117: "PwrDn Data Chksm",
            118: "PwrBd PwrDn Chks",
            124: "App ID Changed",
            125: "Using Backup App",
            134: "Start on PowerUp",
            137: "Ext Prechrg Err",
            138: "Precharge Open",
            141: "Autn Enc Angle",
            142: "Autn Spd Rstrct",
            143: "AutoTune CurReg",
            144: "AutoTune Inertia",
            145: "AutoTune Travel",
            13037: "Net IO Timeout"
        }

        # Build the status string from the run/direction flags.
        if status['vfd_active'] == "Stopped":
            status_string = status_string + status['vfd_active'] + "; " + status['vfd_ready']
        else:
            status_string = status_string + status['vfd_active']
            if status['vfd_rev']:
                status_string = status_string + '; ' + status['vfd_rev']
            if status['vfd_fwd']:
                status_string = status_string + '; ' + status['vfd_fwd']
            if status['vfd_atreference']:
                status_string = status_string + '; ' + status['vfd_atreference']
        alarm_string = ""
        if status['vfd_faulted'] == "Drive Faulted":
            status_string = status_string + '; ' + status['vfd_faulted']
        if status['vfd_commloss']:
            alarm_string = alarm_string + '; ' + status['vfd_commloss']
        if status['vfd_fbkalarm']:
            alarm_string = alarm_string + '; ' + status['vfd_fbkalarm']
        if status['vfd_faultcode']:
            # Use .get() so an unknown code doesn't KeyError away the whole
            # status update (the caller logs and drops the exception).
            alarm_string = alarm_string + '; ' + "Fault: {} Fault code: {}".format(fault_codes.get(status['vfd_faultcode'], "Unknown"), str(status['vfd_faultcode']))
        if status['minspeedalarm']:
            alarm_string = alarm_string + '; ' + status['minspeedalarm']
        if status['pumpedoff']:
            alarm_string = alarm_string + '; ' + status['pumpedoff']
        if status['lockedout']:
            alarm_string = alarm_string + '; ' + status['lockedout']
        if status['tubingpressurehi']:
            alarm_string = alarm_string + '; ' + status['tubingpressurehi']
        if status['tubingpressurehihi']:
            alarm_string = alarm_string + '; ' + status['tubingpressurehihi']
        if status['tubingpressurelo']:
            alarm_string = alarm_string + '; ' + status['tubingpressurelo']
        if status['tubingpressurelolo']:
            alarm_string = alarm_string + '; ' + status['tubingpressurelolo']
        if status['flowmeterhihi']:
            alarm_string = alarm_string + '; ' + status['flowmeterhihi']
        if status['flowmeterhi']:
            alarm_string = alarm_string + '; ' + status['flowmeterhi']
        if status['flowmeterlolo']:
            alarm_string = alarm_string + '; ' + status['flowmeterlolo']
        if status['flowmeterlo']:
            alarm_string = alarm_string + '; ' + status['flowmeterlo']
        if status['fluidlevellolo']:
            alarm_string = alarm_string + '; ' + status['fluidlevellolo']
        if status['fluidlevello']:
            alarm_string = alarm_string + '; ' + status['fluidlevello']
        if status['fluidlevelhi']:
            alarm_string = alarm_string + '; ' + status['fluidlevelhi']
        if status['fluidlevelhihi']:
            alarm_string = alarm_string + '; ' + status['fluidlevelhihi']
        try:
            # BUG FIX: the original compared a single character to the
            # two-character string '; ' (e.g. status_string[0] == '; '),
            # which is always False, so the separator was never stripped
            # and alarm strings always began with "; ".
            if status_string.startswith('; '):
                status_string = status_string[2:]
            if status_string.endswith('; '):
                status_string = status_string[:-2]
            if alarm_string.startswith('; '):
                alarm_string = alarm_string[2:]
            if alarm_string.endswith('; '):
                alarm_string = alarm_string[:-2]
        except Exception as e:
            log.warning("Error in send status semicolon: {}".format(e))

        # Only send when something actually changed.
        if self.status != status_string:
            self.status = status_string
            log.info("Sending {} for {}".format(status_string, 'run_status'))
            self.sendtodbDev(1, 'run_status', status_string, 0, 'PiFlow')
            if "Operating" in status_string and not self.rts.runs[self.rts.todayString]["run_" + str(self.rts.currentRun)]["start"]:
                self.rts.startRun()
                self.rts.saveDataToFile()
            elif "Stopped" in status_string and self.rts.runs[self.rts.todayString]["run_" + str(self.rts.currentRun)]["start"] and not self.rts.runs[self.rts.todayString]["run_" + str(self.rts.currentRun)]["end"]:
                self.rts.endRun()
                self.rts.saveDataToFile()
            self.sendtodbDev(1, "percentRunTime30Days", self.rts.calculateRunPercentMultiDay(), 0, 'PiFlow')
        if self.alarm != alarm_string:
            self.alarm = alarm_string
            log.info("Sending {} for {}".format(alarm_string, 'fault_a'))
            self.sendtodbDev(1, 'fault_a', alarm_string, 0, 'PiFlow')
|
||||
|
||||
|
||||
|
||||
|
||||
92
meshifyDrivers/piflow/Tags.py
Normal file
92
meshifyDrivers/piflow/Tags.py
Normal file
@@ -0,0 +1,92 @@
|
||||
"""Channel definitions for the PiFlow driver.

Builds the polled-channel list (``tags``) from the persisted site
configuration: EtherNet/IP PLC tags when the site runs the Micro800 VFD
PLC (``isVFD``), otherwise Modbus registers for the flow meter alone or
flow meter plus drive (``drive_enabled``).
"""
from Channel import PLCChannel,Channel, ModbusChannel, status_codes, fault_code_a, fault_code_b, volume_units, totalizer_units
import persistence

PERSIST = persistence.load('persist.json')
flowmeter_unit_number = PERSIST['flowmeter']
drive_enabled = PERSIST['drive_enabled']
isVFD = PERSIST['isVFD']
if drive_enabled:
    drive_unit_number = PERSIST['drive']

# Older persist files predate the 'plc_ip' key: fall back to the default
# address and write it back so the file is complete from now on.
DEFAULT_PLC_IP = '192.168.1.12'
try:
    plc_ip = PERSIST['plc_ip']
except KeyError:
    # BUG FIX: the previous code stored the default into PERSIST but never
    # bound the local name, so the PLC tag list below raised NameError.
    plc_ip = DEFAULT_PLC_IP
    PERSIST['plc_ip'] = DEFAULT_PLC_IP
    persistence.store(PERSIST)

if isVFD:
    # Micro800 PLC: status/alarm booleans plus analog process values.
    tags = [
        PLCChannel(plc_ip,'vfd_atreference','sts_VFD_AtReference','BOOL',0,3600,map_={0: "", 1: "At speed"},plc_type='Micro800'),
        PLCChannel(plc_ip,'vfd_rev','sts_VFD_REV','BOOL',0,3600,map_={0: "", 1: "Operating in Reverse"},plc_type='Micro800'),
        PLCChannel(plc_ip,'vfd_fwd','sts_VFD_FWD','BOOL',0,3600,map_={0: "", 1: "Operating in Forward"},plc_type='Micro800'),
        PLCChannel(plc_ip,'vfd_active','sts_VFD_Active','BOOL',0,3600,map_={0: "Stopped", 1: "Running"},plc_type='Micro800'),
        PLCChannel(plc_ip,'vfd_ready','sts_VFD_Ready','BOOL',0,3600,map_={0: "Drive Not Ready", 1: "Drive Ready"},plc_type='Micro800'),
        PLCChannel(plc_ip,'vfd_faultcode','sts_VFD_FaultCode','REAL',0,3600, plc_type='Micro800'),
        PLCChannel(plc_ip,'vfd_faulted','AL0_VFD','BOOL',0,3600,map_={0: "", 1: "Drive Faulted"},plc_type='Micro800'),
        PLCChannel(plc_ip,'vfd_commloss','AL0_VFDComLoss','BOOL',0,3600,map_={0: "", 1: "Drive Comms Loss"},plc_type='Micro800'),
        PLCChannel(plc_ip,'vfd_fbkalarm','AL0_VFD_FBAlarm','BOOL',0,3600,map_={0: "", 1: "Drive Lost Feedback"},plc_type='Micro800'),
        PLCChannel(plc_ip,'tubingpressurehi','AL0_TubingPressureHi','BOOL',0,3600,map_={0: "", 1: "High Tubing Pressure"},plc_type='Micro800'),
        PLCChannel(plc_ip,'tubingpressurehihi','AL0_TubingPressureHiHi','BOOL',0,3600,map_={0: "", 1: "High High Tubing Pressure"},plc_type='Micro800'),
        PLCChannel(plc_ip,'tubingpressurelo','AL0_TubingPressureLo','BOOL',0,3600,map_={0: "", 1: "Low Tubing Pressure"},plc_type='Micro800'),
        PLCChannel(plc_ip,'tubingpressurelolo','AL0_TubingPressureLoLo','BOOL',0,3600,map_={0: "", 1: "Low Low Tubing Pressure"},plc_type='Micro800'),
        PLCChannel(plc_ip,'flowmeterhihi','AL0_FlowMeterHiHi','BOOL',0,3600,map_={0: "", 1: "High High FM Flow Rate"},plc_type='Micro800'),
        PLCChannel(plc_ip,'flowmeterhi','AL0_FlowMeterHi','BOOL',0,3600,map_={0: "", 1: "High FM Flow Rate"},plc_type='Micro800'),
        PLCChannel(plc_ip,'flowmeterlolo','AL0_FlowMeterLoLo','BOOL',0,3600,map_={0: "", 1: "Low Low FM Flow Rate"},plc_type='Micro800'),
        PLCChannel(plc_ip,'flowmeterlo','AL0_FlowMeterLo','BOOL',0,3600,map_={0: "", 1: "Low FM Flow Rate"},plc_type='Micro800'),
        PLCChannel(plc_ip,'minspeedalarm','AL0_MinSpeedAlarm','BOOL',0,3600,map_={0: "", 1: "Drive not able to maintain min speed"},plc_type='Micro800'),
        PLCChannel(plc_ip,'pumpedoff','AL0_PumpedOff','BOOL',0,3600,map_={0: "", 1: "Pumped Off"},plc_type='Micro800'),
        PLCChannel(plc_ip,'fluidlevellolo','AL0_FluidLevelLoLo','BOOL',0,3600,map_={0: "", 1: "Low Low Fluid Level"},plc_type='Micro800'),
        PLCChannel(plc_ip,'fluidlevello','AL0_FluidLevelLo','BOOL',0,3600,map_={0: "", 1: "Low Fluid Level"},plc_type='Micro800'),
        PLCChannel(plc_ip,'fluidlevelhi','AL0_FluidLevelHi','BOOL',0,3600,map_={0: "", 1: "High Fluid Level"},plc_type='Micro800'),
        PLCChannel(plc_ip,'fluidlevelhihi','AL0_FluidLevelHiHi','BOOL',0,3600,map_={0: "", 1: "High High Fluid Level"},plc_type='Micro800'),
        PLCChannel(plc_ip,'lockedout','AlarmLockOut','BOOL',0,3600,map_={0: "", 1: "Locked Out Repeated Alarms"},plc_type='Micro800'),
        PLCChannel(plc_ip,'volume_flow','Val_FlowmeterFR','REAL',5,3600,plc_type='Micro800'),
        PLCChannel(plc_ip,'current','val_VFD_OutputCurrent','REAL',5,3600,plc_type='Micro800'),
        PLCChannel(plc_ip,'frequency','val_VFD_ActualSpeed','REAL',5,3600,plc_type='Micro800'),
        PLCChannel(plc_ip,'pid_feedback','val_FluidLevel','REAL',5,3600,plc_type='Micro800'),
        PLCChannel(plc_ip,'totalizer_1','Val_FlowMeterT1','REAL',5,3600,plc_type='Micro800'),
        PLCChannel(plc_ip,'totalizer_2','Val_FlowMeterT2','REAL',5,3600,plc_type='Micro800'),
        PLCChannel(plc_ip,'totalizer_3','Val_FlowMeterT3','REAL',5,3600,plc_type='Micro800'),
        PLCChannel(plc_ip,'volume_flow_units','CMD_FlowMeterUnit','BOOL',1,3600,map_={0: "GPM", 1: "BPD"},plc_type='Micro800')
    ]
else:
    if drive_enabled:
        # Modbus flow meter plus drive registers (status, fault, PID, ratings).
        tags = [
            ModbusChannel('volume_flow', 3873, 'FLOAT', 10, 3600,channel_size=2, unit_number=flowmeter_unit_number),
            ModbusChannel('totalizer_1', 2609, 'FLOAT', 100, 3600,channel_size=2, unit_number=flowmeter_unit_number),
            ModbusChannel('totalizer_2', 2809, 'FLOAT', 100, 3600,channel_size=2, unit_number=flowmeter_unit_number),
            ModbusChannel('totalizer_3', 3009, 'FLOAT', 100, 3600,channel_size=2, unit_number=flowmeter_unit_number),
            ModbusChannel('volume_flow_units', 2102, 'INTEGER', 1,86400,channel_size=1, unit_number=flowmeter_unit_number, transform_fn=volume_units),
            ModbusChannel('totalizer_1_units', 4603, 'INTEGER', 1,86400,channel_size=1, unit_number=flowmeter_unit_number, transform_fn=totalizer_units),
            ModbusChannel('totalizer_2_units', 4604, 'INTEGER', 1,86400,channel_size=1, unit_number=flowmeter_unit_number, transform_fn=totalizer_units),
            ModbusChannel('totalizer_3_units', 4605, 'INTEGER', 1,86400,channel_size=1, unit_number=flowmeter_unit_number, transform_fn=totalizer_units),
            ModbusChannel('remote_start', 0000, 'INTEGER', 1, 86400, channel_size=1, unit_number=flowmeter_unit_number),
            ModbusChannel('run_status', 772, 'STRING', 0, 3600, channel_size=1, unit_number=drive_unit_number, transform_fn=status_codes),
            ModbusChannel('frequency', 784, 'INTEGER', 2, 3600, channel_size=2, unit_number=drive_unit_number,scaling=2 ),
            ModbusChannel('current', 783, 'INTEGER', 2, 3600, channel_size=2, unit_number=drive_unit_number,scaling=1 ),
            ModbusChannel('fault_a', 815, 'STRING', 1, 3600, channel_size=1, unit_number=drive_unit_number,transform_fn=fault_code_a),
            ModbusChannel('fault_b', 816, 'STRING', 1, 3600, channel_size=1, unit_number=drive_unit_number,transform_fn=fault_code_b),
            ModbusChannel('pid_ref', 791, 'INTEGER', 5, 3600, channel_size=1, unit_number=drive_unit_number,scaling=1),
            ModbusChannel('pid_feedback', 792, 'INTEGER', 5, 3600, channel_size=1, unit_number=drive_unit_number,scaling=1),
            ModbusChannel('motor_rated_current', 4896, 'INTEGER', 300, 86400, channel_size=1, unit_number=drive_unit_number,scaling=1),
            ModbusChannel('sleep_delay', 4924, 'INTEGER', 5, 86400, channel_size=1, unit_number=drive_unit_number, scaling=1)
        ]
    else:
        # Flow meter only: no drive registers are polled.
        tags = [
            ModbusChannel('volume_flow', 3873, 'FLOAT', 10, 3600,channel_size=2, unit_number=flowmeter_unit_number),
            ModbusChannel('totalizer_1', 2609, 'FLOAT', 100, 3600,channel_size=2, unit_number=flowmeter_unit_number),
            ModbusChannel('totalizer_2', 2809, 'FLOAT', 100, 3600,channel_size=2, unit_number=flowmeter_unit_number),
            ModbusChannel('totalizer_3', 3009, 'FLOAT', 100, 3600,channel_size=2, unit_number=flowmeter_unit_number),
            ModbusChannel('volume_flow_units', 2102, 'INTEGER', 1, 86400,channel_size=1, unit_number=flowmeter_unit_number, transform_fn=volume_units),
            ModbusChannel('totalizer_1_units', 4603, 'INTEGER', 1, 86400,channel_size=1, unit_number=flowmeter_unit_number, transform_fn=totalizer_units),
            ModbusChannel('totalizer_2_units', 4604, 'INTEGER', 1, 86400,channel_size=1, unit_number=flowmeter_unit_number, transform_fn=totalizer_units),
            ModbusChannel('totalizer_3_units', 4605, 'INTEGER', 1, 86400,channel_size=1, unit_number=flowmeter_unit_number, transform_fn=totalizer_units),
            ModbusChannel('remote_start', 0000, 'BOOL', 1, 86400, channel_size=1, unit_number=flowmeter_unit_number)
        ]
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
BIN
meshifyDrivers/piflow/__pycache__/runtimeStats.cpython-39.pyc
Normal file
BIN
meshifyDrivers/piflow/__pycache__/runtimeStats.cpython-39.pyc
Normal file
Binary file not shown.
17
meshifyDrivers/piflow/config.txt
Normal file
17
meshifyDrivers/piflow/config.txt
Normal file
@@ -0,0 +1,17 @@
|
||||
{
|
||||
|
||||
"driverFileName":"PiFlow.py",
|
||||
"deviceName":"piflow",
|
||||
"driverId":"0280",
|
||||
"releaseVersion":"28",
|
||||
"files": {
|
||||
"file1":"PiFlow.py",
|
||||
"file2":"Channel.py",
|
||||
"file3":"file_logger.py",
|
||||
"file4":"Tags.py",
|
||||
"file5":"utilities.py",
|
||||
"file6":"persistence.py",
|
||||
"file7":"runtimeStats.py"
|
||||
}
|
||||
|
||||
}
|
||||
205
meshifyDrivers/piflow/device_base.py
Normal file
205
meshifyDrivers/piflow/device_base.py
Normal file
@@ -0,0 +1,205 @@
|
||||
import types
|
||||
import traceback
|
||||
import binascii
|
||||
import threading
|
||||
import time
|
||||
import thread
|
||||
import os
|
||||
import struct
|
||||
import sys
|
||||
import textwrap
|
||||
|
||||
class deviceBase():
    """Base class for Meshify device drivers (Python 2).

    Holds device identity (name/number/MAC) and the outbound publish
    queue, and provides helpers that format channel readings into
    ``meshify/db/<company>/<mac>/<devicename>/<channel>`` topics (or
    ThingsBoard ``v1/devices/me/...`` topics) and enqueue them on
    ``self.q``. The topic/device-name strings are a wire contract.
    """

    def __init__(self, name=None, number=None, mac=None, Q=None, mcu=None, companyId=None, offset=None, mqtt=None, Nodes=None):
        """Store identity references and precompute device-name strings.

        Q is the outbound publish queue (consumed elsewhere); Nodes is
        the driver-wide node registry that the sendtodb* helpers add
        derived nodes to so they can receive "set" commands; offset is a
        clock correction in seconds applied to generated timestamps.
        """
        self.offset = offset
        self.company = companyId
        self.name = name
        self.number = number
        self.q = Q
        # Full Meshify device name, e.g. name_[aa:bb:cc:dd:ee:ff:01:23]!
        self.deviceName = name + '_[' + mac + ':' + number[0:2] + ':' + number[2:] + ']!'
        self.chName = "M1" + '_[' + mac + ':'
        self.chName2 = '_[' + mac + ':'
        print 'device name is:'
        print self.deviceName
        # MAC is kept colon-free and upper-cased for topic construction.
        mac2 = mac.replace(":", "")
        self.mac = mac2.upper()
        self.address = 1
        self.debug = True
        self.mcu = mcu
        self.firstRun = True
        self.mqtt = mqtt
        self.nodes = Nodes
        #local dictionary of derived nodes ex: localNodes[tank_0199] = self
        self.localNodes = {}
        # NOTE(review): side effects at construction time - relaxes the
        # reboot helper's permissions and forces Google DNS; confirm this
        # belongs in the driver rather than in gateway provisioning.
        os.system("chmod 777 /root/reboot")
        os.system("echo nameserver 8.8.8.8 > /etc/resolv.conf")


    def sendtodbLoc(self, ch, channel, value, timestamp, deviceName, mac):
        """Publish a reading for a derived node addressed by an explicit MAC.

        Registers the derived node in the shared registry, rebuilds the
        colon-separated "tech" name from *mac*, zero-pads *ch* to two
        digits, and queues the reading with the fixed ":98]!" suffix.
        """

        #this will add your derived nodes the master nodes list, allowing them to receive sets!!
        localNodesName = deviceName + "_" + str(ch) + "99"

        if not self.localNodes.has_key(localNodesName):
            self.localNodes[localNodesName] = True
            self.nodes[localNodesName] = self

        #make the techname
        lst = textwrap.wrap(str(mac), width=2)
        tech = ""
        for i in range(len(lst)):
            tech += lst[i].lower() + ":"


        chName2 = '_[' + tech

        if int(ch) < 10:
            ch = "0" + str(int(ch))

        # Channel strings longer than two characters are truncated from the right.
        if len(ch) > 2:
            ch = ch[:-2]

        dname = deviceName + chName2 + str(ch) + ":98]!"

        # Timestamp 0 means "stamp with the (offset-corrected) current time".
        if int(timestamp) == 0:
            timestamp = self.getTime()

        topic = 'meshify/db/%s/%s/%s/%s' % (self.company, mac, dname, channel)
        print topic
        msg = """[ { "value":"%s", "timestamp":"%s" } ]""" % (str(value), str(timestamp))
        print msg
        self.q.put([topic, msg, 0])

    def sendtodbDevJSON(self, ch, channel, value, timestamp, deviceName):
        """Queue a reading whose value is embedded as raw JSON (unquoted)."""
        if int(ch) < 10:
            ch = "0" + str(int(ch))
        dname = deviceName + self.chName2 + str(ch) + ":99]!"
        if int(timestamp) == 0:
            timestamp = self.getTime()

        topic = 'meshify/db/%s/%s/%s/%s' % (self.company, self.mac, dname, channel)
        print topic
        # Note: value is NOT quoted here, unlike sendtodbDev.
        msg = """[ { "value":%s, "timestamp":"%s" } ]""" % (str(value), str(timestamp))
        print msg
        self.q.put([topic, msg, 0])

    def sendtodbLora(self, ch, channel, value, timestamp, deviceName):
        """Queue a reading for a LoRa node whose channel is "aa:bb" formatted."""

        # Normalize a 4-character channel into the "aa:bb" form.
        if ":" not in ch:
            ch = ch[0:2] + ":" + ch[2:4]

        #this will add your derived nodes the master nodes list, allowing them to receive sets!!
        localNodesName = deviceName + "_" + str(ch).replace(':', "")

        if not self.localNodes.has_key(localNodesName):
            self.localNodes[localNodesName] = True
            self.nodes[localNodesName] = self



        dname = deviceName + self.chName2 + str(ch) + "]!"



        if int(timestamp) == 0:
            timestamp = self.getTime()

        topic = 'meshify/db/%s/%s/%s/%s' % (self.company, self.mac, dname, channel)
        print topic
        msg = """[ { "value":"%s", "timestamp":"%s" } ]""" % (str(value), str(timestamp))
        print msg
        self.q.put([topic, msg, 0])

    def sendtodbDev(self, ch, channel, value, timestamp, deviceName):
        """Queue a string-valued reading for a derived node (":99]!" suffix).

        Also registers the derived node in the shared registry so it can
        receive "set" commands.
        """

        #this will add your derived nodes the master nodes list, allowing them to receive sets!!
        localNodesName = deviceName + "_" + str(ch) + "99"

        if not self.localNodes.has_key(localNodesName):
            self.localNodes[localNodesName] = True
            self.nodes[localNodesName] = self

        if int(ch) < 10:
            ch = "0" + str(int(ch))

        dname = deviceName + self.chName2 + str(ch) + ":99]!"



        if int(timestamp) == 0:
            timestamp = self.getTime()

        topic = 'meshify/db/%s/%s/%s/%s' % (self.company, self.mac, dname, channel)
        print topic
        msg = """[ { "value":"%s", "timestamp":"%s" } ]""" % (str(value), str(timestamp))
        print msg
        self.q.put([topic, msg, 0])

    def sendToTB(self, payload):
        """Queue a ThingsBoard telemetry payload on the device-token topic."""
        topic = 'v1/devices/me/telemetry'
        print(topic, payload)
        self.q.put([topic, payload, 0])

    def sendToTBAttributes(self, payload):
        """Queue a ThingsBoard client-side attributes payload."""
        topic = 'v1/devices/me/attributes'
        print(topic, payload)
        self.q.put([topic, payload, 0])

    def sendtodbCH(self, ch, channel, value, timestamp):
        """Queue a reading on the "M1"-prefixed channel name (":99]!" suffix)."""

        if int(ch) < 10:
            ch = "0" + str(ch)

        dname = self.chName + str(ch) + ":99]!"



        if int(timestamp) == 0:
            timestamp = self.getTime()

        topic = 'meshify/db/%s/%s/%s/%s' % (self.company, self.mac, dname, channel)
        print topic
        msg = """[ { "value":"%s", "timestamp":"%s" } ]""" % (str(value), str(timestamp))
        print msg
        self.q.put([topic, msg, 0])

    def sendtodb(self, channel, value, timestamp):
        """Queue a reading on the device's own name.

        Timestamps before 2014-05-19 (epoch 1400499858) are treated as
        bogus and the reading is dropped; otherwise the clock offset is
        applied before publishing.
        """

        if int(timestamp) == 0:
            timestamp = self.getTime()
        # NOTE(review): getTime() returns a str, so this comparison mixes
        # str and int (Python 2 compares them by type name) -- confirm
        # the guard behaves as intended for freshly stamped readings.
        if timestamp < 1400499858:
            return
        else:
            timestamp = str(int(timestamp) + int(self.offset))

        topic = 'meshify/db/%s/%s/%s/%s' % (self.company, self.mac, self.deviceName, channel)
        print topic
        msg = """[ { "value":"%s", "timestamp":"%s" } ]""" % (str(value), str(timestamp))
        print msg
        self.q.put([topic, msg, 0])

    def sendtodbJSON(self, channel, value, timestamp):
        """Like sendtodb, but embeds the value as raw JSON (unquoted)."""

        if int(timestamp) == 0:
            timestamp = self.getTime()
        if timestamp < 1400499858:
            return
        else:
            timestamp = str(int(timestamp) + int(self.offset))

        topic = 'meshify/db/%s/%s/%s/%s' % (self.company, self.mac, self.deviceName, channel)
        print topic
        msg = """[ { "value":%s, "timestamp":"%s" } ]""" % (str(value), str(timestamp))
        print msg
        self.q.put([topic, msg, 0])

    def getTime(self):
        """Return current epoch seconds plus the clock offset, as a string."""
        return str(int(time.time() + int(self.offset)))
|
||||
|
||||
|
||||
|
||||
|
||||
18
meshifyDrivers/piflow/file_logger.py
Normal file
18
meshifyDrivers/piflow/file_logger.py
Normal file
@@ -0,0 +1,18 @@
|
||||
"""Logging setup for PiFlow"""
|
||||
import logging
|
||||
from logging.handlers import RotatingFileHandler
|
||||
import sys
|
||||
|
||||
log_formatter = logging.Formatter('%(asctime)s %(levelname)s %(funcName)s(%(lineno)d) %(message)s')
|
||||
log_file = './PiFlow.log'
|
||||
my_handler = RotatingFileHandler(log_file, mode='a', maxBytes=500*1024,
|
||||
backupCount=2, encoding=None, delay=0)
|
||||
my_handler.setFormatter(log_formatter)
|
||||
my_handler.setLevel(logging.INFO)
|
||||
filelogger = logging.getLogger('PiFlow')
|
||||
filelogger.setLevel(logging.INFO)
|
||||
filelogger.addHandler(my_handler)
|
||||
|
||||
console_out = logging.StreamHandler(sys.stdout)
|
||||
console_out.setFormatter(log_formatter)
|
||||
filelogger.addHandler(console_out)
|
||||
20
meshifyDrivers/piflow/modbusTester.py
Normal file
20
meshifyDrivers/piflow/modbusTester.py
Normal file
@@ -0,0 +1,20 @@
|
||||
"""Standalone Modbus smoke test: poll the flow meter and drive over the serial bus."""
import minimalmodbus


# Serial-line settings shared by all minimalmodbus instruments.
minimalmodbus.BAUDRATE = 9600
minimalmodbus.STOPBITS = 1
# Modbus slave address of the instrument under test.
address = 123

instrument = minimalmodbus.Instrument('/dev/ttyS0', address) #device, modbus slave address
instrument.debug = True
# NOTE(review): loop structure reconstructed from a whitespace-mangled
# source -- confirm whether the drive read belongs inside the
# three-iteration loop or runs once afterwards.
for _ in range(3):
    try:
        value = instrument.read_float(3873) #register -1 for float
        print("Flow Rate from Flow Meter: {}".format(value))
    except Exception as e:
        print("Error: {}".format(e))

    try:
        value = instrument.read_float(784) #register -1 for float
        print("Frequency from Drive: {}".format(value))
    except Exception as e:
        print("Error: {}".format(e))
|
||||
21
meshifyDrivers/piflow/persistence.py
Normal file
21
meshifyDrivers/piflow/persistence.py
Normal file
@@ -0,0 +1,21 @@
|
||||
"""Data persistance functions."""
|
||||
# if more advanced persistence is needed, use a sqlite database
|
||||
import json
|
||||
|
||||
|
||||
def load(filename="persist.json"):
|
||||
"""Load persisted settings from the specified file."""
|
||||
try:
|
||||
with open(filename, 'r') as persist_file:
|
||||
return json.load(persist_file)
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
|
||||
def store(persist_obj, filename="persist.json"):
    """Write *persist_obj* to *filename* as indented JSON.

    Returns None on success (json.dump's return value) and False when
    the file cannot be written or the object is not serializable.
    """
    try:
        persist_file = open(filename, 'w')
        try:
            return json.dump(persist_obj, persist_file, indent=4)
        finally:
            persist_file.close()
    except Exception:
        return False
|
||||
172
meshifyDrivers/piflow/runtimeStats.py
Normal file
172
meshifyDrivers/piflow/runtimeStats.py
Normal file
@@ -0,0 +1,172 @@
|
||||
from datetime import datetime as dt
|
||||
import time
|
||||
import json
|
||||
import math
|
||||
|
||||
class RuntimeStats:
    """Track pump run intervals and VFD frequency samples over ~30 days.

    State lives in ``self.runs``: a dict keyed by "YYYY-MM-DD" day
    strings, each holding "run_<n>" entries of the form
    ``{"start": epoch, "end": epoch, "frequencies": [hz, ...]}``.
    The whole structure is persisted to/from runtimestats.json.
    """

    def __init__(self):
        # runs[dayString]["run_<n>"] = {"start":, "end":, "frequencies": []}
        self.runs = {}
        self.currentRun = 0
        self.today = ""
        self.todayString = ""

    def manageTime(self):
        """Roll state over to a new day and trim history beyond 30 days.

        If the calendar day changed since the last call: close any run
        still open (capping its end at 23:59:59 of the old day), start a
        fresh day, then drop the oldest days until the span covered by
        self.runs is at most 30 days.
        """
        if self.todayString != dt.strftime(dt.today(), "%Y-%m-%d"):
            if self.runs[self.todayString]["run_" + str(self.currentRun)]["start"] and not self.runs[self.todayString]["run_" + str(self.currentRun)]["end"]:
                # Cap the still-open run at the last second of the old day.
                self.runs[self.todayString]["run_" + str(self.currentRun)]["end"] = time.mktime(dt.strptime(self.todayString + " 23:59:59", "%Y-%m-%d %H:%M:%S").timetuple())
            self.addDay()
            # NOTE(review): addDay() already refreshes today/todayString;
            # these two assignments look redundant -- confirm.
            self.today = dt.today()
            self.todayString = dt.strftime(self.today, "%Y-%m-%d")
            days = list(self.runs.keys())
            days.sort()
            while (dt.strptime(days[-1],"%Y-%m-%d") - dt.strptime(days[0], "%Y-%m-%d")).days > 30:
                self.removeDay(day=days[0])
                days = list(self.runs.keys())
                days.sort()

    def addHertzDataPoint(self, frequency):
        """Record a positive frequency sample against the current run."""
        if frequency > 0:
            self.manageTime()
            try:
                self.runs[self.todayString]["run_" + str(self.currentRun)]["frequencies"].append(frequency)
            except:
                # First sample for a run record that lacks a "frequencies" list.
                self.runs[self.todayString]["run_" + str(self.currentRun)]["frequencies"] = [frequency]

    def startRun(self):
        """Stamp the current run's start; closes a run left open first."""
        if self.checkRunning():
            self.endRun()
        self.runs[self.todayString]["run_" + str(self.currentRun)]["start"] = time.time()

    def endRun(self):
        """Stamp the current run's end and open the next (empty) run for today."""
        self.runs[self.todayString]["run_" + str(self.currentRun)]["end"] = time.time()
        self.currentRun += 1
        self.runs[self.todayString]["run_" + str(self.currentRun)] = {"start":0, "end": 0, "frequencies":[]}

    def checkRunning(self):
        """Return True if the current run has started but not yet ended."""
        if self.runs[self.todayString]["run_" + str(self.currentRun)]["start"] and not self.runs[self.todayString]["run_" + str(self.currentRun)]["end"]:
            return True
        return False

    def addDay(self):
        """Begin today's entry with a single empty run and reset the counter."""
        self.today = dt.today()
        self.todayString = dt.strftime(self.today, "%Y-%m-%d")
        self.currentRun = 1
        self.runs[self.todayString] = {}
        self.runs[self.todayString]["run_" + str(self.currentRun)] = {"start":0, "end": 0, "frequencies":[]}

    def countRunsDay(self, day=None):
        """Count the run entries for *day* (default today).

        Counts every run_<n> key, including the trailing not-yet-started
        run that endRun/addDay pre-create.
        """
        if not day:
            day = self.todayString
        return len(self.runs[day].keys())

    def countRunsMultiDay(self, numDays=30):
        """Count run entries across all retained days.

        NOTE(review): numDays is accepted but never used -- every day
        kept in self.runs is counted (manageTime caps history at ~30
        days, which masks this).
        """
        total_runs = 0
        for day in list(self.runs.keys()):
            total_runs += self.countRunsDay(day=day)
        return total_runs

    def calculateAverageHertzDay(self, day=None, returnArray=False):
        """Mean frequency (2 dp) over *day*'s runs, or the raw sample list.

        NOTE(review): raises ZeroDivisionError when the day has no
        samples and returnArray is False -- confirm callers guard this.
        """
        dayFrequencies = []
        if not day:
            day = self.todayString
        for run in list(self.runs[day].keys()):
            try:
                dayFrequencies += self.runs[day][run]["frequencies"]
            except Exception as e:
                print("{} missing frequency data for {}".format(day,run))
        if returnArray:
            return dayFrequencies
        return round(math.fsum(dayFrequencies)/len(dayFrequencies),2)

    def calculateAverageHertzMultiDay(self, numDays=30):
        """Mean frequency over completed days within numDays (today excluded).

        Returns 0 when no samples exist in the window.
        """
        self.manageTime()
        frequencies = []
        for day in list(self.runs.keys()):
            if not day == self.todayString and (dt.strptime(self.todayString, "%Y-%m-%d") - dt.strptime(day, "%Y-%m-%d")).days <= numDays:
                try:
                    frequencies += self.calculateAverageHertzDay(day=day, returnArray=True)
                except Exception as e:
                    print("{} missing frequency data".format(day))
        if len(frequencies):
            return round(math.fsum(frequencies)/len(frequencies), 2)
        return 0

    def calculateRunTimeDay(self, day=None, convertToHours=True):
        """Total run time (end - start summed over runs) for *day*.

        Returned in hours (2 dp) unless convertToHours is False.
        NOTE(review): a run with start set but end still 0 contributes a
        large negative value -- confirm this is only called on days
        whose runs are all closed.
        """
        total_time = 0
        if not day:
            day = self.todayString
        for run in list(self.runs[day].keys()):
            total_time = self.runs[day][run]["end"] - self.runs[day][run]["start"] + total_time
        if convertToHours:
            return self.convertSecondstoHours(total_time)
        return total_time

    def calculateRunTimeMultiDay(self, numDays=30, convertToHours=True):
        """Total run time over completed days within numDays (today excluded)."""
        total_time = 0
        for day in list(self.runs.keys()):
            if not day == self.todayString and (dt.strptime(self.todayString, "%Y-%m-%d") - dt.strptime(day, "%Y-%m-%d")).days <= numDays:
                total_time += self.calculateRunTimeDay(day=day, convertToHours=False)
        if convertToHours:
            return self.convertSecondstoHours(total_time)
        return total_time

    def calculateRunPercentDay(self, day=None, precise=False):
        """Run time for *day* as a percent of a full 24-hour day."""
        if not day:
            day = self.todayString
        if precise:
            return (self.calculateRunTimeDay(day=day)/24) * 100
        return round((self.calculateRunTimeDay(day=day)/24) * 100, 2)


    def calculateRunPercentMultiDay(self, numDays=30, precise=False):
        """Run time over the last numDays as a percent of that window.

        NOTE(review): the inner calculateRunTimeMultiDay() calls use the
        default numDays=30 regardless of the argument -- confirm.
        """
        self.manageTime()
        if precise:
            return (self.calculateRunTimeMultiDay()/(24*numDays)) * 100
        return round((self.calculateRunTimeMultiDay()/(24*numDays)) * 100,2)

    def removeDay(self, day=None):
        """Delete *day* from the history; *day* is required."""
        if not day:
            raise Exception("Day can not be None")
        print("removing day {}".format(day))
        del self.runs[day]

    def convertSecondstoHours(self, seconds):
        """Convert *seconds* to hours rounded to two decimals."""
        return round(seconds / (60*60),2)

    def loadDataFromFile(self, filePath="./runtimestats.json"):
        """Restore state from *filePath*, or seed a fresh file on any failure.

        Any exception (missing or corrupt file) falls through to
        creating a new day and writing a fresh stats file.
        """
        try:
            with open(filePath, "r") as f:
                temp = json.load(f)
                self.runs = temp["data"]
                self.currentRun = temp["current_run"]
                self.today = dt.strptime(temp["current_day"], "%Y-%m-%d")
                self.todayString = temp["current_day"]
                self.manageTime()
        except:
            print("Could not find file at {}".format(filePath))
            print("creating file")
            self.addDay()
            try:
                with open(filePath, "w") as f:
                    d = {
                        "current_run": self.currentRun,
                        "current_day": self.todayString,
                        "data": self.runs
                    }
                    json.dump(d, f, indent=4)
            except Exception as e:
                print(e)

    def saveDataToFile(self, filePath="./runtimestats.json"):
        """Persist the current run state to *filePath* as indented JSON."""
        try:
            print("Saving Runs")
            with open(filePath, "w") as f:
                d = {
                    "current_run": self.currentRun,
                    "current_day": self.todayString,
                    "data": self.runs
                }
                json.dump(d, f, indent=4)
        except Exception as e:
            print(e)
|
||||
638
meshifyDrivers/piflow/runtimestats.ipynb
Normal file
638
meshifyDrivers/piflow/runtimestats.ipynb
Normal file
@@ -0,0 +1,638 @@
|
||||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 1,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from datetime import datetime as dt\n",
|
||||
"from datetime import timedelta as td\n",
|
||||
"from time import sleep\n",
|
||||
"import json\n",
|
||||
"import math"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 106,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"class RuntimeStats:\n",
|
||||
" \n",
|
||||
" def __init__(self):\n",
|
||||
" self.runs = {}\n",
|
||||
" self.currentRun = 0\n",
|
||||
" self.today = \"\"\n",
|
||||
" self.todayString = \"\"\n",
|
||||
"\n",
|
||||
" def manageTime(self):\n",
|
||||
" if self.today != dt.today():\n",
|
||||
" if self.runs[self.todayString][\"run_\" + str(self.currentRun)][\"start\"] and not self.runs[self.todayString][\"run_\" + str(self.currentRun)][\"end\"]:\n",
|
||||
" self.runs[self.todayString][\"run_\" + str(self.currentRun)][\"end\"] = dt.timestamp(dt.strptime(self.todayString + \" 23:59:59\", \"%Y-%m-%d %H:%M:%S\"))\n",
|
||||
" self.addDay()\n",
|
||||
" days = list(self.runs.keys())\n",
|
||||
" days.sort()\n",
|
||||
" while (dt.strptime(days[-1],\"%Y-%m-%d\") - dt.strptime(days[0], \"%Y-%m-%d\")).days > 30:\n",
|
||||
" self.removeDay(day=days[0])\n",
|
||||
" days = list(self.runs.keys())\n",
|
||||
" days.sort()\n",
|
||||
"\n",
|
||||
" def addHertzDataPoint(self, frequency):\n",
|
||||
" if frequency > 0:\n",
|
||||
" self.manageTime()\n",
|
||||
" try:\n",
|
||||
" self.runs[self.todayString][\"run_\" + str(self.currentRun)][\"frequencies\"].append(frequency)\n",
|
||||
" except:\n",
|
||||
" self.runs[self.todayString][\"run_\" + str(self.currentRun)][\"frequencies\"] = [frequency]\n",
|
||||
"\n",
|
||||
" def startRun(self):\n",
|
||||
" self.runs[self.todayString][\"run_\" + str(self.currentRun)][\"start\"] = dt.timestamp(dt.now())\n",
|
||||
"\n",
|
||||
" def endRun(self):\n",
|
||||
" self.runs[self.todayString][\"run_\" + str(self.currentRun)][\"end\"] = dt.timestamp(dt.now()) \n",
|
||||
"\n",
|
||||
" def addDay(self):\n",
|
||||
" self.today = dt.today()\n",
|
||||
" self.todayString = dt.strftime(self.today, \"%Y-%m-%d\")\n",
|
||||
" self.currentRun = 1\n",
|
||||
" self.runs[self.todayString] = {}\n",
|
||||
" self.runs[self.todayString][\"run_\" + str(self.currentRun)] = {\"start\":0, \"end\": 0, \"frequencies\":[]}\n",
|
||||
"\n",
|
||||
" def countRunsDay(self, day=None):\n",
|
||||
" if not day:\n",
|
||||
" day = self.todayString\n",
|
||||
" return len(self.runs[day].keys())\n",
|
||||
"\n",
|
||||
" def countRunsMultiDay(self, numDays=30):\n",
|
||||
" total_runs = 0\n",
|
||||
" for day in list(self.runs.keys()):\n",
|
||||
" total_runs += self.countRunsDay(day=day)\n",
|
||||
" return total_runs\n",
|
||||
"\n",
|
||||
" def calculateAverageHertzDay(self, day=None, returnArray=False):\n",
|
||||
" dayFrequencies = []\n",
|
||||
" if not day:\n",
|
||||
" day = self.todayString\n",
|
||||
" for run in list(self.runs[day].keys()):\n",
|
||||
" try:\n",
|
||||
" dayFrequencies += self.runs[day][run][\"frequencies\"]\n",
|
||||
" except Exception as e:\n",
|
||||
" print(\"{} missing frequency data for {}\".format(day,run))\n",
|
||||
" if returnArray:\n",
|
||||
" return dayFrequencies\n",
|
||||
" return round(math.fsum(dayFrequencies)/len(dayFrequencies),2)\n",
|
||||
"\n",
|
||||
" def calculateAverageHertzMultiDay(self, numDays=30):\n",
|
||||
" frequencies = []\n",
|
||||
" for day in list(self.runs.keys()):\n",
|
||||
" try:\n",
|
||||
" frequencies += self.calculateAverageHertzDay(day=day, returnArray=True)\n",
|
||||
" except Exception as e:\n",
|
||||
" print(\"{} missing frequency data\".format(day))\n",
|
||||
" return round(math.fsum(frequencies)/len(frequencies), 2)\n",
|
||||
" \n",
|
||||
" def calculateRunTimeDay(self, day=None, convertToHours=True):\n",
|
||||
" total_time = 0\n",
|
||||
" if not day:\n",
|
||||
" day = self.todayString\n",
|
||||
" for run in list(self.runs[day].keys()):\n",
|
||||
" total_time = self.runs[day][run][\"end\"] - self.runs[day][run][\"start\"] + total_time\n",
|
||||
" if convertToHours:\n",
|
||||
" return RuntimeStats.convertSecondstoHours(total_time)\n",
|
||||
" return total_time\n",
|
||||
"\n",
|
||||
" def calculateRunTimeMultiDay(self, numDays=30, convertToHours=True):\n",
|
||||
" total_time = 0\n",
|
||||
" for day in list(self.runs.keys()):\n",
|
||||
" total_time += self.calculateRunTimeDay(day=day, convertToHours=False)\n",
|
||||
" if convertToHours:\n",
|
||||
" return RuntimeStats.convertSecondstoHours(total_time)\n",
|
||||
" return total_time\n",
|
||||
" \n",
|
||||
" def calculateRunPercentDay(self, day=None, precise=False):\n",
|
||||
" if not day:\n",
|
||||
" day = self.todayString\n",
|
||||
" if precise:\n",
|
||||
" return (self.calculateRunTimeDay(day=day)/24) * 100\n",
|
||||
" return round((self.calculateRunTimeDay(day=day)/24) * 100, 2)\n",
|
||||
" \n",
|
||||
"\n",
|
||||
" def calculateRunPercentMultiDay(self, numDays=30, precise=False):\n",
|
||||
" if precise:\n",
|
||||
" return (self.calculateRunTimeMultiDay()/(24*numDays)) * 100\n",
|
||||
" return round((self.calculateRunTimeMultiDay()/(24*numDays)) * 100,2)\n",
|
||||
"\n",
|
||||
" def removeDay(self, day=None):\n",
|
||||
" if not day:\n",
|
||||
" raise Exception(\"Day can not be None\")\n",
|
||||
" print(\"removing day {}\".format(day))\n",
|
||||
" del self.runs[day]\n",
|
||||
" \n",
|
||||
" def convertSecondstoHours(seconds):\n",
|
||||
" return round(seconds / (60*60),2)\n",
|
||||
"\n",
|
||||
" def loadDataFromFile(self, filePath=\"../runtimestats.json\"):\n",
|
||||
" try:\n",
|
||||
" with open(filePath, \"r\") as f:\n",
|
||||
" temp = json.load(f)\n",
|
||||
" self.runs = temp[\"data\"]\n",
|
||||
" self.currentRun = temp[\"current_run\"]\n",
|
||||
" self.today = dt.strptime(temp[\"current_day\"], \"%Y-%m-%d\")\n",
|
||||
" self.todayString = temp[\"current_day\"]\n",
|
||||
" self.manageTime()\n",
|
||||
" except FileExistsError:\n",
|
||||
" print(\"Could not find file at {}\".format(filePath))\n",
|
||||
" except FileNotFoundError:\n",
|
||||
" print(\"Could not find file at {}\".format(filePath))\n",
|
||||
" print(\"creating file\")\n",
|
||||
" try:\n",
|
||||
" with open(filePath, \"w\") as f:\n",
|
||||
" d = {\n",
|
||||
" \"current_run\": self.currentRun,\n",
|
||||
" \"current_day\": self.todayString,\n",
|
||||
" \"data\": self.runs\n",
|
||||
" }\n",
|
||||
" json.dump(d, f, indent=4)\n",
|
||||
" except Exception as e:\n",
|
||||
" print(e)\n",
|
||||
" except Exception as e:\n",
|
||||
" print(e)\n",
|
||||
"\n",
|
||||
" def saveDataToFile(self, filePath=\"../runtimestats.json\"):\n",
|
||||
" try:\n",
|
||||
" print(\"Saving Runs\")\n",
|
||||
" with open(filePath, \"w+\") as f:\n",
|
||||
" d = {\n",
|
||||
" \"current_run\": self.currentRun,\n",
|
||||
" \"current_day\": self.todayString,\n",
|
||||
" \"data\": self.runs\n",
|
||||
" }\n",
|
||||
" json.dump(d, f, indent=4)\n",
|
||||
" except FileNotFoundError:\n",
|
||||
" print(\"Could not find file at {}\".format(filePath))\n",
|
||||
" except Exception as e:\n",
|
||||
" print(e)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 107,
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"{}\n",
|
||||
"{'2023-01-11': {'run_1': {'start': 1673465959.694776, 'frequencies': [67, 65, 59, 62, 100], 'end': 1673475545.313309}, 'run_2': {'start': 1673469145.271416, 'end': 1673469136.691883, 'frequencies': [100, 99, 98, 87, 56, 56, 58, 67]}}, '2023-01-10': {'run_1': {'start': 1673465959.694776, 'frequencies': [67, 65, 59, 62], 'end': 1673469136.691883}, 'run_2': {'start': 1673469145.271416, 'end': 1673469136.691883}}, '2023-01-09': {'run_1': {'start': 1673465959.694776, 'frequencies': [67, 65, 59, 62], 'end': 1673469136.691883}, 'run_2': {'start': 1673469145.271416, 'end': 1673469136.691883}}, '2022-12-17': {'run_1': {'start': 0, 'end': 0, 'frequencies': []}}}\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"rts = RuntimeStats()\n",
|
||||
"print(rts.runs)\n",
|
||||
"path = \"/Users/nico/Documents/test/runtimestats.json\"\n",
|
||||
"rts.loadDataFromFile(filePath=path)\n",
|
||||
"print(rts.runs)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 108,
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"removing day 2022-12-17\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"rts.manageTime()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 109,
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"data": {
|
||||
"text/plain": [
|
||||
"{'2023-01-11': {'run_1': {'start': 1673465959.694776,\n",
|
||||
" 'frequencies': [67, 65, 59, 62, 100],\n",
|
||||
" 'end': 1673475545.313309},\n",
|
||||
" 'run_2': {'start': 1673469145.271416,\n",
|
||||
" 'end': 1673469136.691883,\n",
|
||||
" 'frequencies': [100, 99, 98, 87, 56, 56, 58, 67]}},\n",
|
||||
" '2023-01-10': {'run_1': {'start': 1673465959.694776,\n",
|
||||
" 'frequencies': [67, 65, 59, 62],\n",
|
||||
" 'end': 1673469136.691883},\n",
|
||||
" 'run_2': {'start': 1673469145.271416, 'end': 1673469136.691883}},\n",
|
||||
" '2023-01-09': {'run_1': {'start': 1673465959.694776,\n",
|
||||
" 'frequencies': [67, 65, 59, 62],\n",
|
||||
" 'end': 1673469136.691883},\n",
|
||||
" 'run_2': {'start': 1673469145.271416, 'end': 1673469136.691883}},\n",
|
||||
" '2023-01-17': {'run_1': {'start': 0, 'end': 0, 'frequencies': []}}}"
|
||||
]
|
||||
},
|
||||
"execution_count": 109,
|
||||
"metadata": {},
|
||||
"output_type": "execute_result"
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"rts.runs"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"rts.endRun()\n",
|
||||
"print(rts.runs)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"rts.saveDataToFile(filePath=path)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"rts.startRun()\n",
|
||||
"print(rts.runs)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"rts.countRunsDay()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 30,
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"data": {
|
||||
"text/plain": [
|
||||
"2.66"
|
||||
]
|
||||
},
|
||||
"execution_count": 30,
|
||||
"metadata": {},
|
||||
"output_type": "execute_result"
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"rts.calculateRunTimeDay(day=\"2023-1-11\")"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 31,
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"data": {
|
||||
"text/plain": [
|
||||
"11.08"
|
||||
]
|
||||
},
|
||||
"execution_count": 31,
|
||||
"metadata": {},
|
||||
"output_type": "execute_result"
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"rts.calculateRunPercentDay(day=\"2023-1-11\")"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 32,
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"data": {
|
||||
"text/plain": [
|
||||
"0.61"
|
||||
]
|
||||
},
|
||||
"execution_count": 32,
|
||||
"metadata": {},
|
||||
"output_type": "execute_result"
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"rts.calculateRunPercentMultiDay()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 33,
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"data": {
|
||||
"text/plain": [
|
||||
"4.42"
|
||||
]
|
||||
},
|
||||
"execution_count": 33,
|
||||
"metadata": {},
|
||||
"output_type": "execute_result"
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"rts.calculateRunTimeMultiDay()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 34,
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"ename": "KeyError",
|
||||
"evalue": "'2023-1-17'",
|
||||
"output_type": "error",
|
||||
"traceback": [
|
||||
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
|
||||
"\u001b[0;31mKeyError\u001b[0m Traceback (most recent call last)",
|
||||
"\u001b[1;32m/Users/nico/Documents/GitHub/HenryPump-Drivers/piflow/runtimestats.ipynb Cell 12\u001b[0m in \u001b[0;36mRuntimeStats.addHertzDataPoint\u001b[0;34m(self, frequency)\u001b[0m\n\u001b[1;32m <a href='vscode-notebook-cell:/Users/nico/Documents/GitHub/HenryPump-Drivers/piflow/runtimestats.ipynb#X13sZmlsZQ%3D%3D?line=9'>10</a>\u001b[0m \u001b[39mtry\u001b[39;00m:\n\u001b[0;32m---> <a href='vscode-notebook-cell:/Users/nico/Documents/GitHub/HenryPump-Drivers/piflow/runtimestats.ipynb#X13sZmlsZQ%3D%3D?line=10'>11</a>\u001b[0m \u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49mruns[\u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49mtodayString][\u001b[39m\"\u001b[39m\u001b[39mrun_\u001b[39m\u001b[39m\"\u001b[39m \u001b[39m+\u001b[39m \u001b[39mstr\u001b[39m(\u001b[39mself\u001b[39m\u001b[39m.\u001b[39mcurrentRun)][\u001b[39m\"\u001b[39m\u001b[39mfrequencies\u001b[39m\u001b[39m\"\u001b[39m]\u001b[39m.\u001b[39mappend(frequency)\n\u001b[1;32m <a href='vscode-notebook-cell:/Users/nico/Documents/GitHub/HenryPump-Drivers/piflow/runtimestats.ipynb#X13sZmlsZQ%3D%3D?line=11'>12</a>\u001b[0m \u001b[39mexcept\u001b[39;00m:\n",
|
||||
"\u001b[0;31mKeyError\u001b[0m: '2023-1-17'",
|
||||
"\nDuring handling of the above exception, another exception occurred:\n",
|
||||
"\u001b[0;31mKeyError\u001b[0m Traceback (most recent call last)",
|
||||
"\u001b[1;32m/Users/nico/Documents/GitHub/HenryPump-Drivers/piflow/runtimestats.ipynb Cell 12\u001b[0m in \u001b[0;36m<cell line: 1>\u001b[0;34m()\u001b[0m\n\u001b[0;32m----> <a href='vscode-notebook-cell:/Users/nico/Documents/GitHub/HenryPump-Drivers/piflow/runtimestats.ipynb#X13sZmlsZQ%3D%3D?line=0'>1</a>\u001b[0m rts\u001b[39m.\u001b[39;49maddHertzDataPoint(\u001b[39m67\u001b[39;49m)\n",
|
||||
"\u001b[1;32m/Users/nico/Documents/GitHub/HenryPump-Drivers/piflow/runtimestats.ipynb Cell 12\u001b[0m in \u001b[0;36mRuntimeStats.addHertzDataPoint\u001b[0;34m(self, frequency)\u001b[0m\n\u001b[1;32m <a href='vscode-notebook-cell:/Users/nico/Documents/GitHub/HenryPump-Drivers/piflow/runtimestats.ipynb#X13sZmlsZQ%3D%3D?line=10'>11</a>\u001b[0m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39mruns[\u001b[39mself\u001b[39m\u001b[39m.\u001b[39mtodayString][\u001b[39m\"\u001b[39m\u001b[39mrun_\u001b[39m\u001b[39m\"\u001b[39m \u001b[39m+\u001b[39m \u001b[39mstr\u001b[39m(\u001b[39mself\u001b[39m\u001b[39m.\u001b[39mcurrentRun)][\u001b[39m\"\u001b[39m\u001b[39mfrequencies\u001b[39m\u001b[39m\"\u001b[39m]\u001b[39m.\u001b[39mappend(frequency)\n\u001b[1;32m <a href='vscode-notebook-cell:/Users/nico/Documents/GitHub/HenryPump-Drivers/piflow/runtimestats.ipynb#X13sZmlsZQ%3D%3D?line=11'>12</a>\u001b[0m \u001b[39mexcept\u001b[39;00m:\n\u001b[0;32m---> <a href='vscode-notebook-cell:/Users/nico/Documents/GitHub/HenryPump-Drivers/piflow/runtimestats.ipynb#X13sZmlsZQ%3D%3D?line=12'>13</a>\u001b[0m \u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49mruns[\u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49mtodayString][\u001b[39m\"\u001b[39m\u001b[39mrun_\u001b[39m\u001b[39m\"\u001b[39m \u001b[39m+\u001b[39m \u001b[39mstr\u001b[39m(\u001b[39mself\u001b[39m\u001b[39m.\u001b[39mcurrentRun)][\u001b[39m\"\u001b[39m\u001b[39mfrequencies\u001b[39m\u001b[39m\"\u001b[39m] \u001b[39m=\u001b[39m [frequency]\n",
|
||||
"\u001b[0;31mKeyError\u001b[0m: '2023-1-17'"
|
||||
]
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"rts.addHertzDataPoint(67)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 18,
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"74.92\n",
|
||||
"2023-1-10 missing frequency data for run_2\n",
|
||||
"2023-1-9 missing frequency data for run_2\n",
|
||||
"70.48\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"print(rts.calculateAverageHertzDay(\"2023-1-11\"))\n",
|
||||
"print(rts.calculateAverageHertzMultiDay())"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"runs = {\"run_1\" : {}}\n",
|
||||
"runs[\"run_1\"][\"start\"] = dt.timestamp(dt.now())\n",
|
||||
"runs[\"run_1\"][\"end\"] = dt.timestamp(dt.now() + td(0,3600))\n",
|
||||
"\n",
|
||||
"runs[\"run_2\"] = {}\n",
|
||||
"runs[\"run_2\"][\"start\"] = dt.timestamp(dt.now() + td(0,3600) +td(0,3600))\n",
|
||||
"\n",
|
||||
"runs[\"run_2\"][\"end\"] = dt.timestamp(dt.now() + td(0,3600) +td(0,3600) + td(0,3600))"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"total_time = 0\n",
|
||||
"for key in list(runs.keys()):\n",
|
||||
" total_time = runs[key][\"end\"] - runs[key][\"start\"] + total_time"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"print(total_time)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"today = dt.today()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 39,
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"1673991101.567802\n",
|
||||
"1674021599.0\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"print(dt.timestamp(dt.now()))\n",
|
||||
"print(dt.timestamp(dt.strptime(rts.todayString + \" 23:59:59\", \"%Y-%m-%d %H:%M:%S\")))"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 47,
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"data": {
|
||||
"text/plain": [
|
||||
"'2023-01-17'"
|
||||
]
|
||||
},
|
||||
"execution_count": 47,
|
||||
"metadata": {},
|
||||
"output_type": "execute_result"
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"dt.strftime(dt.now(), \"%Y-%m-%d\")"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"{str(today.year)+\"-\"+str(today.month)+\"-\"+str(today.day): {\"run_1\": {\"start\": dt.timestamp(dt.now()), \"end\": dt.timestamp(dt.now()), \"hz\": [56,60,57,61,59,57,60]}}}"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"a = [1,2,4,5]\n",
|
||||
"b = [6,7,8,8,89]\n",
|
||||
"c = []\n",
|
||||
"c += a\n",
|
||||
"c += b\n",
|
||||
"print(math.fsum(c)/len(c))\n",
|
||||
"print((math.fsum(a)/len(a) + math.fsum(b)/len(b))/2)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 35,
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"works\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"t = {\"this\": \"test1\", \"that\": {\"is\": \"a bigger test\"}}\n",
|
||||
"del t[\"this\"]\n",
|
||||
"try:\n",
|
||||
" t[\"those\"]\n",
|
||||
"except:\n",
|
||||
" print(\"works\")"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 59,
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"Saving Runs\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"rts.addDay()\n",
|
||||
"rts.saveDataToFile(filePath=path)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 78,
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"2023-01-17\n",
|
||||
"2022-12-17\n",
|
||||
"31\n",
|
||||
"31\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"days = list(rts.runs.keys())\n",
|
||||
"days.sort()\n",
|
||||
"print(days[-1])\n",
|
||||
"print(days[0])\n",
|
||||
"print((dt.strptime(days[-1],\"%Y-%m-%d\") - dt.strptime(days[0], \"%Y-%m-%d\")).days)\n",
|
||||
"if (dt.strptime(days[-1],\"%Y-%m-%d\") - dt.strptime(days[0], \"%Y-%m-%d\")).days > 30:\n",
|
||||
" print((dt.strptime(days[-1],\"%Y-%m-%d\") - dt.strptime(days[0], \"%Y-%m-%d\")).days)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 110,
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"True\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"s = \"Operating in Forward;\"\n",
|
||||
"if \"Operating\" in s:\n",
|
||||
" print(True)"
|
||||
]
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "webkit",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.9.12 | packaged by conda-forge | (main, Mar 24 2022, 23:25:14) \n[Clang 12.0.1 ]"
|
||||
},
|
||||
"orig_nbformat": 4,
|
||||
"vscode": {
|
||||
"interpreter": {
|
||||
"hash": "22238595996e71d7b27448e64f75d285aa95d1182295fdd30f75905446cf0091"
|
||||
}
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 2
|
||||
}
|
||||
11
meshifyDrivers/piflow/testRTS.py
Normal file
11
meshifyDrivers/piflow/testRTS.py
Normal file
@@ -0,0 +1,11 @@
|
||||
from runtimeStats import RuntimeStats as RTS


# Manual smoke test for RuntimeStats persistence:
# load previously-saved stats, open a new run, persist, and dump state.
# NOTE(review): uses a developer-local absolute path -- this script only
# runs on the original author's machine; parameterize before reuse.
rts = RTS()
rts.loadDataFromFile("/Users/nico/Documents/test/runtimestats.json")
rts.startRun()
# endRun() intentionally left disabled so the saved file contains an
# open (in-progress) run.
#rts.endRun()
rts.saveDataToFile("/Users/nico/Documents/test/runtimestats.json")
print(rts.runs)
|
||||
|
||||
|
||||
63
meshifyDrivers/piflow/utilities.py
Normal file
63
meshifyDrivers/piflow/utilities.py
Normal file
@@ -0,0 +1,63 @@
|
||||
"""Utility functions for the driver."""
|
||||
import socket
|
||||
import struct
|
||||
import urllib
|
||||
import contextlib
|
||||
def get_private_ip_address():
    """Find the private IP Address of the host device.

    Opens a UDP socket "connected" toward a public resolver (no packet is
    actually sent for a UDP connect) and reads the local interface address
    the OS selected for that route.

    Returns:
        The private IP address string on success, or the caught exception
        object on failure.  NOTE(review): returning the exception (rather
        than raising or returning None) is a quirky legacy contract --
        preserved here because existing callers may rely on it.
    """
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    try:
        sock.connect(("8.8.8.8", 80))
        ip_address = sock.getsockname()[0]
    except Exception as e:
        return e
    finally:
        # Fix: the original only closed the socket on the success path and
        # leaked it when connect()/getsockname() raised; close everywhere.
        sock.close()

    return ip_address
|
||||
|
||||
def get_public_ip_address():
    """Return the public IP address as reported by checkip.amazonaws.com.

    Returns:
        The address read from the service, or the "0.0.0.0" fallback when
        the lookup fails (the error is printed, not raised).
    """
    ip_address = "0.0.0.0"
    try:
        # closing() guarantees the HTTP response object is released even if
        # read() raises.
        with contextlib.closing(urllib.urlopen("http://checkip.amazonaws.com")) as url:
            ip_address = url.read()
    except Exception as e:
        print("Could not resolve address: {}".format(e))
    # Fix: the original had a second, redundant `return ip_address` inside
    # the except block; a single exit point returns either the fetched
    # address or the fallback.
    return ip_address
|
||||
|
||||
|
||||
def int_to_float16(int_to_convert):
    """Decode an integer's low 16 bits as an IEEE-754 half-precision float."""
    # Left-pad with zeros and keep the low 16 bits of the binary string.
    bits = ('0' * 16 + '{0:b}'.format(int_to_convert))[-16:]
    sign = -1.0 if bits[0] == '1' else 1.0
    exponent = float(int(bits[1:6], 2))   # 5 exponent bits
    fraction = float(int(bits[6:], 2))    # 10 fraction bits
    if exponent == 0.0:
        # Subnormal range (exponent bits all zero): no implicit leading 1,
        # fixed scale of 2**-14.
        return sign * 2 ** -14 * fraction / 1024.0
    if exponent == 31.0:
        # Exponent bits all ones: +/-infinity when fraction is zero, NaN
        # otherwise (NaN carries no sign here, matching the original).
        if fraction == 0:
            return sign * float("inf")
        return float("NaN")
    # Normalised value: implicit leading 1 plus the 10 fraction bits.
    return sign * (2 ** (exponent - 15)) * (1.0 + fraction / 1024.0)
|
||||
|
||||
|
||||
def ints_to_float(int1, int2):
    """Convert 2 registers into a floating point number."""
    # Big-endian: int1 holds the high 16 bits, int2 the low 16 bits.
    packed = struct.pack('>HH', int1, int2)
    value = struct.unpack('>f', packed)[0]
    print("[{}, {}] >> {}".format(int1, int2, value))
    return value
|
||||
|
||||
|
||||
def degf_to_degc(temp_f):
    """Convert deg F to deg C."""
    offset_f = temp_f - 32.0
    return offset_f * (5.0/9.0)
|
||||
|
||||
|
||||
def degc_to_degf(temp_c):
    """Convert deg C to deg F."""
    scaled = temp_c * 1.8
    return scaled + 32.0
|
||||
300
meshifyDrivers/plcfreshwater/Channel.py
Normal file
300
meshifyDrivers/plcfreshwater/Channel.py
Normal file
@@ -0,0 +1,300 @@
|
||||
"""Define Meshify channel class."""
|
||||
import time
|
||||
import urllib
|
||||
from pycomm.ab_comm.clx import Driver as ClxDriver
|
||||
from pycomm.cip.cip_base import CommError, DataError
|
||||
from file_logger import filelogger as log
|
||||
|
||||
|
||||
|
||||
TAG_DATAERROR_SLEEPTIME = 5
|
||||
|
||||
def binarray(intval):
|
||||
"""Split an integer into its bits."""
|
||||
bin_string = '{0:08b}'.format(intval)
|
||||
bin_arr = [i for i in bin_string]
|
||||
bin_arr.reverse()
|
||||
return bin_arr
|
||||
|
||||
|
||||
def read_tag(addr, tag, plc_type="CLX"):
    """Read a tag from the PLC.

    Args:
        addr: IP address of the PLC.
        tag: Name of the tag to read.
        plc_type: "Micro800" opens a direct connection; anything else
            (default "CLX") connects through the backplane.

    Returns:
        The pycomm read result on success, False on any failure.
    """
    direct = plc_type == "Micro800"
    clx = ClxDriver()
    try:
        if clx.open(addr, direct_connection=direct):
            try:
                val = clx.read_tag(tag)
                clx.close()
                return val
            except DataError as err:
                clx.close()
                # Back off before the caller can retry; presumably data
                # errors repeat immediately on this hardware -- confirm.
                time.sleep(TAG_DATAERROR_SLEEPTIME)
                log.error("Data Error during readTag({}, {}): {}".format(addr, tag, err))
    except CommError:
        # err = c.get_status()
        #clx.close()
        # NOTE(review): close() is commented out here, but the fall-through
        # clx.close() below still runs, so the connection is not leaked.
        log.error("Could not connect during readTag({}, {})".format(addr, tag))
    except AttributeError as err:
        clx.close()
        log.error("AttributeError during readTag({}, {}): \n{}".format(addr, tag, err))
    # Common failure exit: every non-returning path falls through to here.
    clx.close()
    return False
|
||||
|
||||
|
||||
def read_array(addr, tag, start, end, plc_type="CLX"):
    """Read an array from the PLC.

    Reads tag[start] .. tag[end-1] one element at a time.

    Args:
        addr: IP address of the PLC.
        tag: Base tag name; an index suffix "[i]" is appended per element.
        start: First index to read (inclusive).
        end: Last index to read (exclusive).
        plc_type: "Micro800" opens a direct connection, otherwise "CLX".

    Returns:
        List of element values rounded to 4 decimals on success; False when
        the requested range is empty.  NOTE(review): returns None implicitly
        when open() fails or an exception occurs mid-read -- callers must
        treat any falsy result as failure.
    """
    direct = plc_type == "Micro800"
    clx = ClxDriver()
    if clx.open(addr, direct_connection=direct):
        arr_vals = []
        try:
            for i in range(start, end):
                tag_w_index = tag + "[{}]".format(i)
                val = clx.read_tag(tag_w_index)
                # If read_tag fails val is falsy and val[0] raises, landing
                # in the broad except below.
                arr_vals.append(round(val[0], 4))
            if arr_vals:
                clx.close()
                return arr_vals
            else:
                # Empty range (start >= end): nothing was read.
                log.error("No length for {}".format(addr))
                clx.close()
                return False
        except Exception:
            log.error("Error during readArray({}, {}, {}, {})".format(addr, tag, start, end))
            err = clx.get_status()
            clx.close()
            log.error(err)
    # Fall-through close covers the open()-failed and exception paths.
    clx.close()
|
||||
|
||||
|
||||
def write_tag(addr, tag, val, plc_type="CLX"):
    """Write a tag value to the PLC.

    Args:
        addr: IP address of the PLC.
        tag: Name of the tag to write.
        val: Value to write.
        plc_type: "Micro800" opens a direct connection, otherwise "CLX".

    Returns:
        The pycomm write status on success, False on any failure.
    """
    direct = plc_type == "Micro800"
    clx = ClxDriver()
    try:
        if clx.open(addr, direct_connection=direct):
            try:
                # Read first so the write can reuse the tag's reported
                # type (initial_val[1]).
                initial_val = clx.read_tag(tag)
                write_status = clx.write_tag(tag, val, initial_val[1])
                clx.close()
                return write_status
            except DataError as err:
                clx_err = clx.get_status()
                clx.close()
                log.error("--\nDataError during writeTag({}, {}, {}, plc_type={}) -- {}\n{}\n".format(addr, tag, val, plc_type, err, clx_err))

    except CommError as err:
        # NOTE(review): clx_err is computed but never logged here, and the
        # connection is not closed on this path (close() is commented out)
        # -- confirm whether the driver cleans up after CommError.
        clx_err = clx.get_status()
        log.error("--\nCommError during write_tag({}, {}, {}, plc_type={})\n{}\n--".format(addr, tag, val, plc_type, err))
        #clx.close()
    return False
|
||||
|
||||
|
||||
class Channel(object):
    """Holds the configuration for a Meshify channel.

    Tracks the last value sent and decides (via check()) whether a newly
    read value is different enough -- or stale enough -- to send again.
    """

    def __init__(self, mesh_name, data_type, chg_threshold, guarantee_sec, map_=False, write_enabled=False):
        """Initialize the channel.

        Args:
            mesh_name: Meshify channel name.
            data_type: Channel data type, e.g. "BOOL", "STRING" or numeric.
            chg_threshold: Minimum numeric change that triggers a send.
            guarantee_sec: Maximum seconds between sends regardless of change.
            map_: Optional dict mapping raw values to reported values.
            write_enabled: Whether remote writes are allowed.
        """
        self.mesh_name = mesh_name
        self.data_type = data_type
        self.last_value = None          # value before the most recent send
        self.value = None               # most recently sent value
        self.last_send_time = 0         # epoch seconds; 0 means never sent
        self.chg_threshold = chg_threshold
        self.guarantee_sec = guarantee_sec
        self.map_ = map_
        self.write_enabled = write_enabled

    def __str__(self):
        """Create a string for the channel."""
        return "{}\nvalue: {}, last_send_time: {}".format(self.mesh_name, self.value, self.last_send_time)

    def check(self, new_value, force_send=False):
        """Check to see if the new_value needs to be stored.

        Updates value/last_value/last_send_time as a side effect when a
        send is needed.

        Args:
            new_value: Freshly read raw value; None means "re-use current".
            force_send: When True, send even if nothing changed.

        Returns:
            True when the value should be sent upstream.
        """
        send_needed = False
        send_reason = ""
        if new_value is None:
            new_value = self.value
        if self.data_type == 'BOOL' or self.data_type == 'STRING':
            # Discrete types: any difference counts as a change.
            if self.last_send_time == 0:
                send_needed = True
                send_reason = "no send time"
            elif self.value is None:
                send_needed = True
                send_reason = "no value"
            elif (time.time() - self.last_send_time) > self.guarantee_sec:
                send_needed = True
                send_reason = "guarantee sec"
            elif self.value != new_value:
                if self.map_:
                    # NOTE(review): self.map_[new_value] here is NOT guarded
                    # against KeyError (unlike the send path below) -- an
                    # unmapped raw value would raise out of check(); confirm.
                    if (not self.value == self.map_[new_value]) or force_send:
                        send_needed = True
                        send_reason = "value change"
                else:
                    send_needed = True
                    send_reason = "value change"
            elif force_send:
                send_needed = True
                send_reason = "forced"
        else:
            # Numeric types: change must exceed the configured threshold.
            if self.last_send_time == 0:
                send_needed = True
                send_reason = "no send time"
            elif self.value is None:
                send_needed = True
                send_reason = "no value"
            elif abs(self.value - new_value) > self.chg_threshold:
                send_needed = True
                send_reason = "change threshold"
            elif (time.time() - self.last_send_time) > self.guarantee_sec:
                send_needed = True
                send_reason = "guarantee sec"
            elif force_send:
                send_needed = True
                send_reason = "forced"
        if send_needed:
            self.last_value = self.value
            if self.map_:
                try:
                    self.value = self.map_[new_value]
                except KeyError:
                    # Fall back to the raw value when the map is incomplete.
                    log.error("Cannot find a map value for {} in {} for {}".format(new_value, self.map_, self.mesh_name))
                    self.value = new_value
            else:
                self.value = new_value
            self.last_send_time = time.time()
            log.info("Sending {} for {} - {}".format(self.value, self.mesh_name, send_reason))
        return send_needed

    def read(self):
        """Read the value.

        Base implementation is a no-op; subclasses override this with a
        transport-specific read.
        """
        pass
|
||||
|
||||
|
||||
def identity(sent):
    """Pass-through transform: return the argument unchanged."""
    return sent
|
||||
|
||||
|
||||
class ModbusChannel(Channel):
    """Modbus channel object.

    A Channel backed by one or more consecutive Modbus registers, with an
    optional transform applied to the raw register value on read.
    """

    def __init__(self, mesh_name, register_number, data_type, chg_threshold, guarantee_sec, channel_size=1, map_=False, write_enabled=False, transform_fn=identity):
        """Initialize the channel.

        Args:
            mesh_name: Meshify channel name.
            register_number: First Modbus register backing this channel.
            data_type: Channel data type ("BOOL", "STRING" or numeric).
            chg_threshold: Minimum change that triggers a send.
            guarantee_sec: Maximum seconds between sends.
            channel_size: Number of consecutive registers in this channel.
            map_: Optional dict mapping raw values to reported values.
            write_enabled: Whether remote writes are allowed.
            transform_fn: Callable applied to the raw value in read().
        """
        super(ModbusChannel, self).__init__(mesh_name, data_type, chg_threshold, guarantee_sec, map_, write_enabled)
        # Channel.__init__ already initializes the shared bookkeeping
        # (mesh_name, data_type, value/last_value, last_send_time,
        # thresholds, map_, write_enabled); the original re-assigned all of
        # them here redundantly -- only Modbus-specific state is set now.
        self.register_number = register_number
        self.channel_size = channel_size
        self.transform_fn = transform_fn

    def read(self, mbsvalue):
        """Return the transformed read value."""
        return self.transform_fn(mbsvalue)
|
||||
|
||||
|
||||
class PLCChannel(Channel):
    """PLC Channel Object.

    A Channel backed by a single named PLC tag read via read_tag().
    """

    def __init__(self, ip, mesh_name, plc_tag, data_type, chg_threshold, guarantee_sec, map_=False, write_enabled=False, plc_type='CLX'):
        """Initialize the channel.

        Args:
            ip: IP address of the PLC.
            mesh_name: Meshify channel name.
            plc_tag: Name of the PLC tag backing this channel.
            data_type: Channel data type ("BOOL", "STRING" or numeric).
            chg_threshold: Minimum change that triggers a send.
            guarantee_sec: Maximum seconds between sends.
            map_: Optional dict mapping raw values to reported values.
            write_enabled: Whether remote writes are allowed.
            plc_type: "CLX" (default) or "Micro800" (direct connection).
        """
        super(PLCChannel, self).__init__(mesh_name, data_type, chg_threshold, guarantee_sec, map_, write_enabled)
        # Channel.__init__ already initializes the shared bookkeeping; the
        # original re-assigned all of it here redundantly -- only the
        # PLC-specific attributes are set now.
        self.plc_ip = ip
        self.plc_tag = plc_tag
        self.plc_type = plc_type

    def read(self):
        """Read the tag and return its value, or None when unavailable."""
        plc_value = None
        if self.plc_tag and self.plc_ip:
            read_value = read_tag(self.plc_ip, self.plc_tag, plc_type=self.plc_type)
            if read_value:
                # read_tag returns a sequence whose first element is the
                # value; False on failure leaves plc_value as None.
                plc_value = read_value[0]

        return plc_value
|
||||
|
||||
|
||||
class BoolArrayChannels(Channel):
    """Hold the configuration for a set of boolean array channels.

    A single integer PLC tag is split into its bits (via binarray); map_
    maps bit index -> meshify channel name, and each mapped bit becomes
    one entry of the reported value dict.
    """

    def __init__(self, ip, mesh_name, plc_tag, data_type, chg_threshold, guarantee_sec, map_=False, write_enabled=False):
        """Initialize the channel.

        Args:
            ip: IP address of the PLC.
            mesh_name: Meshify channel name.
            plc_tag: Integer PLC tag whose bits are decoded.
            data_type: Channel data type.
            chg_threshold: Minimum change that triggers a send.
            guarantee_sec: Maximum seconds between sends.
            map_: Dict mapping bit index -> reported channel name.
            write_enabled: Whether remote writes are allowed.
        """
        super(BoolArrayChannels, self).__init__(mesh_name, data_type, chg_threshold, guarantee_sec, map_, write_enabled)
        self.plc_ip = ip
        # NOTE(review): the assignments below duplicate what
        # Channel.__init__ just did; kept byte-identical here.
        self.mesh_name = mesh_name
        self.plc_tag = plc_tag
        self.data_type = data_type
        self.last_value = None
        self.value = None
        self.last_send_time = 0
        self.chg_threshold = chg_threshold
        self.guarantee_sec = guarantee_sec
        self.map_ = map_
        self.write_enabled = write_enabled

    def compare_values(self, new_val_dict):
        """Compare new values to old values to see if the values need storing.

        Returns True when any bit differs from last_value, or when a key is
        missing from last_value (a missing key is treated as a change).
        """
        send = False
        for idx in new_val_dict:
            try:
                if new_val_dict[idx] != self.last_value[idx]:
                    send = True
            except KeyError:
                log.error("Key Error in self.compare_values for index {}".format(idx))
                send = True
        return send

    def read(self, force_send=False):
        """Read the value and check to see if needs to be stored.

        Returns:
            True when the decoded bit dict should be sent upstream.
        """
        send_needed = False
        send_reason = ""
        if self.plc_tag:
            val = read_tag(self.plc_ip, self.plc_tag)
            if val:
                # Decode the integer into per-bit values, keyed by the
                # channel names configured in map_.
                bool_arr = binarray(val[0])
                new_val = {}
                for idx in self.map_:
                    try:
                        new_val[self.map_[idx]] = bool_arr[idx]
                    except KeyError:
                        log.error("Not able to get value for index {}".format(idx))

                if self.last_send_time == 0:
                    send_needed = True
                    send_reason = "no send time"
                elif self.value is None:
                    send_needed = True
                    send_reason = "no value"
                elif self.compare_values(new_val):
                    send_needed = True
                    send_reason = "value change"
                elif (time.time() - self.last_send_time) > self.guarantee_sec:
                    send_needed = True
                    send_reason = "guarantee sec"
                elif force_send:
                    send_needed = True
                    send_reason = "forced"

        if send_needed:
            # NOTE(review): unlike Channel.check, value is overwritten
            # BEFORE last_value is saved, so last_value always equals the
            # value just sent and never holds the previous one.  Looks
            # unintentional (compare_values still works, comparing against
            # the last sent dict) -- confirm the ordering is deliberate.
            self.value = new_val
            self.last_value = self.value
            self.last_send_time = time.time()
            log.info("Sending {} for {} - {}".format(self.value, self.mesh_name, send_reason))
        return send_needed
|
||||
33
meshifyDrivers/plcfreshwater/Tags.py
Normal file
33
meshifyDrivers/plcfreshwater/Tags.py
Normal file
@@ -0,0 +1,33 @@
|
||||
from Channel import PLCChannel, ModbusChannel
|
||||
from plcfreshwater import PLC_IP_ADDRESS
|
||||
|
||||
# Channel tables polled by the plcfreshwater driver (Micro800 PLC).
# PLCChannel positional args: ip, meshify channel name, PLC tag name,
# data type, change threshold, guaranteed-send interval in seconds.
tags = [
    PLCChannel(PLC_IP_ADDRESS, "scaled_flow_meter","Scaled_Flow_Meter","REAL", 10, 3600, plc_type="Micro800"),
    PLCChannel(PLC_IP_ADDRESS, "scaled_pressure_transducer","Scaled_Pressure_Transducer","REAL", 3, 3600, plc_type="Micro800"),
    PLCChannel(PLC_IP_ADDRESS, "raw_hand_input","Raw_Hand_Input","BOOL", 1, 7200, plc_type="Micro800", map_={0: "Off", 1: "On", None: "N/A"}),
    PLCChannel(PLC_IP_ADDRESS, "raw_auto_input","Raw_Auto_Input","BOOL", 1, 7200, plc_type="Micro800", map_={0: "Off", 1: "On", None: "N/A"}),
    PLCChannel(PLC_IP_ADDRESS, "raw_run_status","Raw_Run_Status","BOOL", 1, 3600, plc_type="Micro800", map_={0: "Stopped", 1: "Running", None: "N/A"}),
    PLCChannel(PLC_IP_ADDRESS, "raw_local_start","Raw_Local_Start","BOOL", 1, 3600, plc_type="Micro800"),
    PLCChannel(PLC_IP_ADDRESS, "lifetime_flow_meter_gal","Lifetime_Flow_Meter_Gal","REAL", 1000, 3600, plc_type="Micro800"),
    PLCChannel(PLC_IP_ADDRESS, "spt_flow_meter_unit","SPT_Flow_Meter_Unit","BOOL", 0, 3600, plc_type="Micro800"),
    PLCChannel(PLC_IP_ADDRESS, "raw_overload_status", "Raw_Overload_Status", "BOOL", 0, 3600, plc_type="Micro800", map_={0: "Good", 1: "Down on Overload Tripped", None: "N/A"})
]

# Extended table for installations with the flow-meter totalizer program:
# same base channels plus daily/monthly totals in gallons and barrels.
tags_totalizer = [
    PLCChannel(PLC_IP_ADDRESS, "scaled_flow_meter","Scaled_Flow_Meter","REAL", 10, 3600, plc_type="Micro800"),
    PLCChannel(PLC_IP_ADDRESS, "scaled_pressure_transducer","Scaled_Pressure_Transducer","REAL", 3, 3600, plc_type="Micro800"),
    PLCChannel(PLC_IP_ADDRESS, "raw_hand_input","Raw_Hand_Input","BOOL", 1, 7200, plc_type="Micro800", map_={0: "Off", 1: "On", None: "N/A"}),
    PLCChannel(PLC_IP_ADDRESS, "raw_auto_input","Raw_Auto_Input","BOOL", 1, 7200, plc_type="Micro800", map_={0: "Off", 1: "On", None: "N/A"}),
    PLCChannel(PLC_IP_ADDRESS, "raw_run_status","Raw_Run_Status","BOOL", 1, 3600, plc_type="Micro800", map_={0: "Stopped", 1: "Running", None: "N/A"}),
    PLCChannel(PLC_IP_ADDRESS, "raw_local_start","Raw_Local_Start","BOOL", 1, 3600, plc_type="Micro800"),
    PLCChannel(PLC_IP_ADDRESS, "total_fm_yesterday_gal","Totalizer_FM_Yesterday_Total_Gal","REAL", 1, 3600, plc_type="Micro800"),
    PLCChannel(PLC_IP_ADDRESS, "total_fm_day_gal","Totalizer_FM_Current_Day_Total_Gal","REAL", 1, 3600, plc_type="Micro800"),
    PLCChannel(PLC_IP_ADDRESS, "total_fm_last_month_gal","Totalizer_FM_Last_Month_Gal","REAL", 1, 3600, plc_type="Micro800"),
    PLCChannel(PLC_IP_ADDRESS, "total_fm_month_gal","Totalizer_FM_Current_Month_Gal","REAL", 1, 3600, plc_type="Micro800"),
    PLCChannel(PLC_IP_ADDRESS, "total_fm_yesterday_bbls","Totalizer_FM_Yesterday_Total_BBLs","REAL", 1, 3600, plc_type="Micro800"),
    PLCChannel(PLC_IP_ADDRESS, "total_fm_day_bbls","Totalizer_FM_Current_Day_Total_BBLs","REAL", 1, 3600, plc_type="Micro800"),
    PLCChannel(PLC_IP_ADDRESS, "total_fm_last_month_bbls","Totalizer_FM_Last_Month_BBLs","REAL", 1, 3600, plc_type="Micro800"),
    PLCChannel(PLC_IP_ADDRESS, "total_fm_month_bbls","Totalizer_FM_Current_Month_BBLs","REAL", 1, 3600, plc_type="Micro800"),
    PLCChannel(PLC_IP_ADDRESS, "spt_flow_meter_unit","SPT_Flow_Meter_Unit","BOOL", 0, 3600, plc_type="Micro800"),
    PLCChannel(PLC_IP_ADDRESS, "raw_overload_status", "Raw_Overload_Status", "BOOL", 0, 3600, plc_type="Micro800", map_={0: "Good", 1: "Down on Overload Tripped", None: "N/A"})
]
|
||||
14
meshifyDrivers/plcfreshwater/config.txt
Normal file
14
meshifyDrivers/plcfreshwater/config.txt
Normal file
@@ -0,0 +1,14 @@
|
||||
{
|
||||
"files": {
|
||||
"file3": "file_logger.py",
|
||||
"file2": "Channel.py",
|
||||
"file1": "plcfreshwater.py",
|
||||
"file6": "persistence.py",
|
||||
"file5": "utilities.py",
|
||||
"file4": "Tags.py"
|
||||
},
|
||||
"deviceName": "plcfreshwater",
|
||||
"releaseVersion": "16",
|
||||
"driverFileName": "plcfreshwater.py",
|
||||
"driverId": "0100"
|
||||
}
|
||||
360
meshifyDrivers/plcfreshwater/device_base.py
Normal file
360
meshifyDrivers/plcfreshwater/device_base.py
Normal file
@@ -0,0 +1,360 @@
|
||||
import types
|
||||
import traceback
|
||||
import binascii
|
||||
import threading
|
||||
import time
|
||||
import thread
|
||||
import os
|
||||
import struct
|
||||
import sys
|
||||
import textwrap
|
||||
import Queue
|
||||
import json
|
||||
|
||||
|
||||
class deviceBase():
    """Base class for Meshify gateway device drivers (Python 2).

    Provides the MQTT publish helpers (sendtodb*) that format channel
    readings as Meshify topic/JSON pairs and push them onto the outbound
    queue, plus a background thread that services incoming "set" requests.
    Relies on module-level imports (os, time, json, thread, Queue, textwrap)
    made at the top of this file.
    """

    def __init__(self, name=None, number=None, mac=None, Q=None, mcu=None, companyId=None, offset=None, mqtt=None, Nodes=None):
        # Q is the outbound MQTT publish queue; mcu/mqtt/Nodes are framework
        # objects shared with the rest of the gateway runtime.
        self.offset = offset
        self.company = companyId
        self.name = name
        self.number = number
        self.q = Q
        # Canonical Meshify technical device name, e.g. name_[aa:bb:...:01:23]!
        self.deviceName = name + '_[' + mac + ':' + number[0:2] + ':' + number[2:] + ']!'
        self.chName = "M1" + '_[' + mac + ':'
        self.chName2 = '_[' + mac + ':'
        print 'device name is:'
        print self.deviceName
        mac2 = mac.replace(":", "")
        self.mac = mac2.upper()
        self.address = 1
        self.debug = True
        self.mcu = mcu
        self.firstRun = True
        self.mqtt = mqtt
        self.nodes = Nodes
        # local dictionary of derived nodes ex: localNodes[tank_0199] = self
        self.localNodes = {}
        # Gateway housekeeping run on every driver start (requires root).
        os.system("chmod 777 /root/reboot")
        os.system("echo nameserver 8.8.8.8 > /etc/resolv.conf")
        # Queue for incoming sets
        self.loraQ = Queue.Queue()

        self.knownIDs = []
        # Background consumer for the sets queue; daemonized via thread module.
        thread.start_new_thread(self.getSetsThread, ())

    def getSetsThread(self):
        """Consume incoming "set" requests from loraQ forever and reply on
        meshify/responses/<msgId>.

        NOTE(review): the failure path references `funcName`, which is never
        defined, and `msgId`/`channel`/`value` may be unbound if parsing
        fails early — both would raise inside the bare except. The outer
        bare except swallows those errors along with the queue timeout.
        """

        while True:
            try:
                # Block up to 10 minutes waiting for a set; timeout restarts loop.
                item = self.loraQ.get(block=True, timeout=600)
                try:
                    print "here is the item from the sets q"
                    print item
                    if len(item) == 2:
                        # item[1] is a JSON array: [{"payload": {"name": "...", "value": ...}, "msgId": ...}]
                        techname = str(json.loads(item[1])[0]['payload']['name'].split(".")[0])
                        channel = str(json.loads(item[1])[0]['payload']['name'].split(".")[1])
                        name = techname.split("_")[0]
                        id = techname.split("_")[1][1:-2].replace(":","").upper()
                        value = json.loads(item[1])[0]['payload']['value']
                        msgId = json.loads(item[1])[0]['msgId']

                        print channel, value, id, name, msgId
                        # Dispatch to the subclass's set handler; True means success,
                        # False means internal error, any other value is a reason string.
                        success = self.specificSets(channel, value, id, name)

                        if success == True:
                            print "SUCCESS ON SET"
                            # msgId 0 means "no response requested"; note this
                            # `return` ends the whole thread, not just the loop pass.
                            if int(msgId) == 0:
                                return
                            lc = self.getTime()

                            value = str(self.mac) + " Success Setting: " + channel + " To: " + value
                            msg = """[ { "value":"%s", "timestamp":"%s", "msgId":"%s" } ]""" % (value, str(lc), msgId)
                            print value
                            print msg
                            topic = "meshify/responses/" + str(msgId)
                            print topic
                            self.q.put([topic, str(msg), 2])

                        else:

                            lc = self.getTime()
                            if success == False:
                                reason = "(Internal Gateway/Device Error)"
                            else:
                                reason = success
                            value = str(self.mac) + " Failed Setting: " + channel + " To: " + value + " " + reason
                            msg = """[ { "value":"%s", "timestamp":"%s", "msgId":"%s" } ]""" % (value, str(lc), msgId)
                            topic = "meshify/responses/" + msgId
                            self.q.put([topic, str(msg), 2])

                except:
                    # Reached when parsing or the set callback blew up.
                    if int(msgId) == 0:
                        return
                    lc = self.getTime()
                    value = str(self.mac) + " Failed Setting: " + channel + " To: " + value + " (No Callback Found)"
                    msg = """[ { "value":"%s", "timestamp":"%s", "msgId":"%s" } ]""" % (value, str(lc), msgId)
                    topic = "meshify/responses/" + msgId
                    self.q.put([topic, str(msg), 2])
                    # NOTE(review): funcName is undefined — this line would raise NameError.
                    print 'no Set callback found for channel: ' + funcName

            except:
                # Queue.get timeout (Queue.Empty) lands here; loop back around.
                print "sets queue timeout, restarting..."

    def sendtodbDevLora(self, id, channel, value, timestamp, deviceName):
        """Publish a reading for a LoRa sub-device addressed under this
        gateway's MAC, registering its queue with the mcu on first sight."""

        mac = self.mac

        # Synthesize the zig-style MAC suffix from the 6-hex-digit id.
        if deviceName == "mainMeshify":
            zigmac = "_[01:00:00:00:00:" + id[0:2] + ":" + id[2:4] + ":" + id[4:6] + "]!"
        else:
            zigmac = "_[00:00:00:00:00:" + id[0:2] + ":" + id[2:4] + ":" + id[4:6] + "]!"
        dname = deviceName + zigmac

        # define dname, make id into techname and mac
        if id not in self.knownIDs:
            self.knownIDs.append(id)
            # Route future sets for this derived device into our loraQ.
            self.mcu.xbees[dname] = self.loraQ

        # meshify/db/330/C493000354FB/ilora/c493000354fb2A6E/a1-v
        # [ { "value":"0.5635", "timestamp":"1486039316" } ]

        if int(timestamp) == 0:
            timestamp = self.getTime()

        topic = 'meshify/db/%s/%s/%s/%s' % (self.company, mac, dname, channel)
        print topic
        msg = """[ { "value":"%s", "timestamp":"%s" } ]""" % (str(value), str(timestamp))
        print msg
        self.q.put([topic, msg, 0])

    def sendtodbLocLora(self, id, channel, value, timestamp, deviceName):
        """Publish a reading for a LoRa sub-device addressed under its OWN
        zero-padded MAC (not the gateway's), subscribing to its set topics
        on first sight."""

        # Left-pad the id to a full 12-character MAC.
        mac = id
        while len(mac) < 12:
            mac = "0" + mac
        if deviceName == "mainMeshify":
            zigmac = "_[01:00:00:00:00:" + id[0:2] + ":" + id[2:4] + ":" + id[4:6] + "]!"
        else:
            zigmac = "_[00:00:00:00:00:" + id[0:2] + ":" + id[2:4] + ":" + id[4:6] + "]!"
        dname = deviceName + zigmac

        # define dname, make id into techname and mac
        if id not in self.knownIDs:
            self.knownIDs.append(id)
            # Subscribe under both the real company id and company "1".
            topic = str(("meshify/sets/" + str(self.company) + "/" + mac + "/#"))
            self.mqtt.subscribe(topic, 0)
            topic = str(("meshify/sets/" + "1" + "/" + mac + "/#"))
            self.mqtt.subscribe(topic, 0)
            self.mcu.xbees[dname] = self.loraQ

        # meshify/db/330/C493000354FB/ilora/c493000354fb2A6E/a1-v
        # [ { "value":"0.5635", "timestamp":"1486039316" } ]

        if int(timestamp) == 0:
            timestamp = self.getTime()

        topic = 'meshify/db/%s/%s/%s/%s' % (self.company, mac, dname, channel)
        print topic
        msg = """[ { "value":"%s", "timestamp":"%s" } ]""" % (str(value), str(timestamp))
        print msg
        self.q.put([topic, msg, 0])

    def sendtodbLocLoraCom(self, id, channel, value, timestamp, deviceName):
        """Variant of sendtodbLocLora whose synthetic MAC is prefixed with
        "1" and whose non-mainMeshify suffix uses 01 in the fifth octet."""

        mac = "1" + id
        while len(mac) < 12:
            mac = "0" + mac

        if deviceName == "mainMeshify":
            zigmac = "_[01:00:00:00:00:" + id[0:2] + ":" + id[2:4] + ":" + id[4:6] + "]!"
        else:
            zigmac = "_[00:00:00:00:01:" + id[0:2] + ":" + id[2:4] + ":" + id[4:6] + "]!"
        dname = deviceName + zigmac

        # define dname, make id into techname and mac
        if id not in self.knownIDs:
            self.knownIDs.append(id)
            topic = str(("meshify/sets/" + str(self.company) + "/" + mac + "/#"))
            self.mqtt.subscribe(topic, 0)
            topic = str(("meshify/sets/" + "1" + "/" + mac + "/#"))
            self.mqtt.subscribe(topic, 0)
            self.mcu.xbees[dname] = self.loraQ

        # meshify/db/330/C493000354FB/ilora/c493000354fb2A6E/a1-v
        # [ { "value":"0.5635", "timestamp":"1486039316" } ]

        if int(timestamp) == 0:
            timestamp = self.getTime()

        topic = 'meshify/db/%s/%s/%s/%s' % (self.company, mac, dname, channel)
        print topic
        msg = """[ { "value":"%s", "timestamp":"%s" } ]""" % (str(value), str(timestamp))
        print msg
        self.q.put([topic, msg, 0])

    def sendtodbLoc(self, ch, channel, value, timestamp, deviceName, mac):
        """Publish a reading for a derived local node keyed by an explicit
        mac, building the colon-separated techname from it."""

        # this will add your derived nodes to the master nodes list, allowing them to receive sets!!
        localNodesName = deviceName + "_" + str(ch) + "99"

        if not self.localNodes.has_key(localNodesName):
            self.localNodes[localNodesName] = True
            self.nodes[localNodesName] = self

        # make the techname: "aabbcc" -> "aa:bb:cc:"
        lst = textwrap.wrap(str(mac), width=2)
        tech = ""
        for i in range(len(lst)):
            tech += lst[i].lower() + ":"

        chName2 = '_[' + tech

        # Zero-pad single-digit channels; truncate anything longer than 2 chars.
        if int(ch) < 10:
            ch = "0" + str(int(ch))

        if len(ch) > 2:
            ch = ch[:-2]

        dname = deviceName + chName2 + str(ch) + ":98]!"

        if int(timestamp) == 0:
            timestamp = self.getTime()

        topic = 'meshify/db/%s/%s/%s/%s' % (self.company, mac, dname, channel)
        print topic
        msg = """[ { "value":"%s", "timestamp":"%s" } ]""" % (str(value), str(timestamp))
        print msg
        self.q.put([topic, msg, 0])

    def sendtodbDevJSON(self, ch, channel, value, timestamp, deviceName):
        """Like sendtodbDev but emits the value unquoted (raw JSON) in the
        message body; does NOT register the derived node for sets."""

        if int(ch) < 10:
            ch = "0" + str(int(ch))
        dname = deviceName + self.chName2 + str(ch) + ":99]!"
        if int(timestamp) == 0:
            timestamp = self.getTime()

        topic = 'meshify/db/%s/%s/%s/%s' % (self.company, self.mac, dname, channel)
        print topic
        # value is interpolated WITHOUT quotes — caller supplies valid JSON.
        msg = """[ { "value":%s, "timestamp":"%s" } ]""" % (str(value), str(timestamp))
        print msg
        self.q.put([topic, msg, 0])

    def sendtodbLora(self, ch, channel, value, timestamp, deviceName):
        """Publish a reading for a LoRa channel id (e.g. "2A6E"), inserting
        the colon if the caller passed a bare 4-char id."""

        if ":" not in ch:
            ch = ch[0:2] + ":" + ch[2:4]

        # this will add your derived nodes to the master nodes list, allowing them to receive sets!!
        localNodesName = deviceName + "_" + str(ch).replace(':', "")

        if not self.localNodes.has_key(localNodesName):
            self.localNodes[localNodesName] = True
            self.nodes[localNodesName] = self

        dname = deviceName + self.chName2 + str(ch) + "]!"

        if int(timestamp) == 0:
            timestamp = self.getTime()

        topic = 'meshify/db/%s/%s/%s/%s' % (self.company, self.mac, dname, channel)
        print topic
        msg = """[ { "value":"%s", "timestamp":"%s" } ]""" % (str(value), str(timestamp))
        print msg
        self.q.put([topic, msg, 0])

    def sendtodbDev(self, ch, channel, value, timestamp, deviceName):
        """Publish a reading for derived device number `ch` (suffix :99),
        registering it as a local node so it can receive sets."""

        # this will add your derived nodes to the master nodes list, allowing them to receive sets!!
        localNodesName = deviceName + "_" + str(ch) + "99"

        if not self.localNodes.has_key(localNodesName):
            self.localNodes[localNodesName] = True
            self.nodes[localNodesName] = self

        if int(ch) < 10:
            ch = "0" + str(int(ch))

        dname = deviceName + self.chName2 + str(ch) + ":99]!"

        if int(timestamp) == 0:
            timestamp = self.getTime()

        topic = 'meshify/db/%s/%s/%s/%s' % (self.company, self.mac, dname, channel)
        print topic
        msg = """[ { "value":"%s", "timestamp":"%s" } ]""" % (str(value), str(timestamp))
        print msg
        self.q.put([topic, msg, 0])

    def sendtodbCH(self, ch, channel, value, timestamp):
        """Publish a reading on the gateway's own "M1" channel namespace."""

        if int(ch) < 10:
            ch = "0" + str(ch)

        dname = self.chName + str(ch) + ":99]!"

        if int(timestamp) == 0:
            timestamp = self.getTime()

        topic = 'meshify/db/%s/%s/%s/%s' % (self.company, self.mac, dname, channel)
        print topic
        msg = """[ { "value":"%s", "timestamp":"%s" } ]""" % (str(value), str(timestamp))
        print msg
        self.q.put([topic, msg, 0])

    def sendtodb(self, channel, value, timestamp):
        """Publish a reading for this device itself (quoted value).

        NOTE(review): in Python 2 a str timestamp always compares greater
        than the int sanity bound, and getTime() already applies offset, so
        offset may be applied twice when timestamp == 0 — confirm intended.
        """

        if int(timestamp) == 0:
            timestamp = self.getTime()
        # Drop obviously-bogus timestamps (before ~May 2014).
        if timestamp < 1400499858:
            return
        else:
            timestamp = str(int(timestamp) + int(self.offset))

        topic = 'meshify/db/%s/%s/%s/%s' % (self.company, self.mac, self.deviceName, channel)
        print topic
        msg = """[ { "value":"%s", "timestamp":"%s" } ]""" % (str(value), str(timestamp))
        print msg
        self.q.put([topic, msg, 0])

    def sendtodbJSON(self, channel, value, timestamp):
        """Like sendtodb but emits the value unquoted — caller supplies
        valid JSON. Same timestamp caveats as sendtodb."""

        if int(timestamp) == 0:
            timestamp = self.getTime()
        if timestamp < 1400499858:
            return
        else:
            timestamp = str(int(timestamp) + int(self.offset))

        topic = 'meshify/db/%s/%s/%s/%s' % (self.company, self.mac, self.deviceName, channel)
        print topic
        msg = """[ { "value":%s, "timestamp":"%s" } ]""" % (str(value), str(timestamp))
        print msg
        self.q.put([topic, msg, 0])

    def getTime(self):
        # Current epoch seconds shifted by the configured offset, as a string.
        return str(int(time.time() + int(self.offset)))
|
||||
|
||||
|
||||
|
||||
|
||||
19
meshifyDrivers/plcfreshwater/file_logger.py
Normal file
19
meshifyDrivers/plcfreshwater/file_logger.py
Normal file
@@ -0,0 +1,19 @@
|
||||
"""Logging setup for plcfreshwater"""
|
||||
import logging
|
||||
from logging.handlers import RotatingFileHandler
|
||||
import sys
|
||||
from plcfreshwater import TOPIC_MAC
|
||||
|
||||
log_formatter = logging.Formatter('%(asctime)s %(levelname)s %(funcName)s(%(lineno)d) %(message)s')
|
||||
log_file = './plcfreshwater_{}.log'.format(TOPIC_MAC)
|
||||
my_handler = RotatingFileHandler(log_file, mode='a', maxBytes=500*1024,
|
||||
backupCount=2, encoding=None, delay=0)
|
||||
my_handler.setFormatter(log_formatter)
|
||||
my_handler.setLevel(logging.INFO)
|
||||
filelogger = logging.getLogger('plcfreshwater')
|
||||
filelogger.setLevel(logging.INFO)
|
||||
filelogger.addHandler(my_handler)
|
||||
|
||||
console_out = logging.StreamHandler(sys.stdout)
|
||||
console_out.setFormatter(log_formatter)
|
||||
filelogger.addHandler(console_out)
|
||||
21
meshifyDrivers/plcfreshwater/persistence.py
Normal file
21
meshifyDrivers/plcfreshwater/persistence.py
Normal file
@@ -0,0 +1,21 @@
|
||||
"""Data persistance functions."""
|
||||
# if more advanced persistence is needed, use a sqlite database
|
||||
import json
|
||||
|
||||
|
||||
def load(filename="persist.json"):
|
||||
"""Load persisted settings from the specified file."""
|
||||
try:
|
||||
with open(filename, 'r') as persist_file:
|
||||
return json.load(persist_file)
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
|
||||
def store(persist_obj, filename="persist.json"):
|
||||
"""Store the persisting settings into the specified file."""
|
||||
try:
|
||||
with open(filename, 'w') as persist_file:
|
||||
return json.dump(persist_obj, persist_file, indent=4)
|
||||
except Exception:
|
||||
return False
|
||||
500
meshifyDrivers/plcfreshwater/plcfreshwater.py
Normal file
500
meshifyDrivers/plcfreshwater/plcfreshwater.py
Normal file
@@ -0,0 +1,500 @@
|
||||
"""Driver for plcfreshwater"""
|
||||
import logging
|
||||
import threading
|
||||
import json
|
||||
import time
|
||||
from random import randint
|
||||
import os
|
||||
from device_base import deviceBase
|
||||
import persistence
|
||||
from utilities import get_public_ip_address, get_private_ip_address
|
||||
from datetime import datetime as dt
|
||||
_ = None
|
||||
|
||||
# GLOBAL VARIABLES
|
||||
WAIT_FOR_CONNECTION_SECONDS = 10
|
||||
IP_CHECK_PERIOD = 60
|
||||
PLC_IP_ADDRESS = ""
|
||||
TOPIC_MAC = ""
|
||||
|
||||
|
||||
# PERSISTENCE FILE
|
||||
IP_TABLE = persistence.load('persist.json')
|
||||
if not IP_TABLE:
|
||||
IP_TABLE = {
|
||||
"000000000001":"192.168.1.201",
|
||||
"000000000002":"192.168.1.202",
|
||||
"000000000003":"192.168.1.203",
|
||||
"000000000004":"192.168.1.211",
|
||||
"000000000005":"192.168.1.210",
|
||||
"000000000006":"192.168.1.208",
|
||||
"000000000007":"192.168.1.209",
|
||||
"000000000008":"192.168.1.208",
|
||||
"000000000009":"192.168.1.209",
|
||||
"000000000010":"192.168.1.210",
|
||||
"000000000011":"192.168.1.211",
|
||||
"000000000012":"192.168.1.212",
|
||||
"000000000013":"192.168.1.213",
|
||||
"000000000014":"192.168.1.214",
|
||||
"000000000015":"192.168.1.215",
|
||||
"000000000016":"192.168.1.216"
|
||||
}
|
||||
persistence.store(IP_TABLE, 'persist.json')
|
||||
|
||||
|
||||
class start(threading.Thread, deviceBase):
|
||||
"""Start class required by Meshify."""
|
||||
|
||||
def __init__(self, name=None, number=None, mac=None, Q=None, mcu=None,
             companyId=None, offset=None, mqtt=None, Nodes=None):
    """Initialize the driver.

    Wires up both base classes, seeds the driver's runtime state, and
    starts the thread (run() is the entry point). Keep the thread start
    last so all attributes exist before run() can observe them.
    """
    threading.Thread.__init__(self)
    deviceBase.__init__(self, name=name, number=number, mac=mac, Q=Q,
                        mcu=mcu, companyId=companyId, offset=offset,
                        mqtt=mqtt, Nodes=Nodes)

    self.daemon = True
    self.version = "16"  # keep in sync with releaseVersion in config.txt
    self.finished = threading.Event()
    self.force_send = False                 # set by plcfreshwater_sync to force a re-send
    self.public_ip_address = ""             # last reported public IP
    self.public_ip_address_last_checked = 0
    self.private_ip_address = ""            # last reported private IP
    self.plcip = ""                         # resolved in run() from IP_TABLE
    self.ping_counter = 0                   # consecutive failed internet pings
    self.plc_ping_status = 'Default'        # last reported PLC reachability string
    self.flowing = False                    # flow meter currently reading > 0
    self.totalizing = False                 # lifetime totalizer currently increasing
    self.totals_counter = 0                 # consecutive flowing-but-not-totalizing passes
    threading.Thread.start(self)
|
||||
|
||||
# this is a required function for all drivers, its goal is to upload some piece of data
|
||||
# about your device so it can be seen on the web
|
||||
def register(self):
|
||||
"""Register the driver."""
|
||||
# self.sendtodb("log", "BOOM! Booted.", 0)
|
||||
pass
|
||||
|
||||
def run(self):
|
||||
"""Actually run the driver."""
|
||||
|
||||
for i in range(0, WAIT_FOR_CONNECTION_SECONDS):
|
||||
print("plcfreshwater driver will start in {} seconds".format(WAIT_FOR_CONNECTION_SECONDS - i))
|
||||
time.sleep(1)
|
||||
|
||||
global TOPIC_MAC, PLC_IP_ADDRESS, log, write_tag, read_tag, PERSIST
|
||||
|
||||
TOPIC_MAC = self.mac
|
||||
|
||||
from file_logger import filelogger as log
|
||||
log.info("plcfreshwater startup")
|
||||
log.info("BOOM! Starting plcfreshwater driver...")
|
||||
|
||||
self._check_ip_address()
|
||||
|
||||
self.nodes["plcfreshwater_0199"] = self
|
||||
|
||||
send_loops = 0
|
||||
|
||||
PLC_IP_ADDRESS = IP_TABLE[self.mac]
|
||||
self.plcip = PLC_IP_ADDRESS
|
||||
log.info("PLC IP is {}".format(self.plcip))
|
||||
self.sendtodbDev(1, 'plc_ip_address', self.plcip, 0, 'plcfreshwater')
|
||||
PERSIST = persistence.load('totalizers_{}.json'.format(self.mac))
|
||||
if not PERSIST:
|
||||
PERSIST = {
|
||||
'Todays': 0,
|
||||
'Yesterdays': 0,
|
||||
'Current Months': 0,
|
||||
'Previous Months': 0,
|
||||
'Monthly Holding': 0,
|
||||
'Daily Holding': 0,
|
||||
'Lifetime': 0,
|
||||
'Day': 0,
|
||||
'Month': 0,
|
||||
'Last Report': 0,
|
||||
'Totalizers': False
|
||||
}
|
||||
persistence.store(PERSIST, 'totalizers_{}.json'.format(self.mac))
|
||||
from Channel import PLCChannel, ModbusChannel,read_tag, write_tag, TAG_DATAERROR_SLEEPTIME
|
||||
from Tags import tags, tags_totalizer
|
||||
if PERSIST["Totalizers"]:
|
||||
CHANNELS = tags_totalizer
|
||||
else:
|
||||
CHANNELS = tags
|
||||
while True:
|
||||
now = time.time()
|
||||
if self.force_send:
|
||||
log.warning("FORCE SEND: TRUE")
|
||||
if int(time.time()) % 600 == 0 or self.force_send:
|
||||
if self.force_send:
|
||||
payload = {"ts": time.time()*1000, "values": {}}
|
||||
else:
|
||||
payload = {"ts": round(time.time()/600)*600*1000, "values": {}}
|
||||
resetPayload = {"ts": "", "values": {}}
|
||||
dayReset, weekReset, monthReset, yearReset = False, False, False, False
|
||||
for chan in CHANNELS:
|
||||
try:
|
||||
val = chan.read()
|
||||
if chan.mesh_name == "lifetime_flow_meter_gal":
|
||||
payload["values"]["day_volume"], dayReset = self.totalizeDay(val)
|
||||
#payload["values"]["week_volume"], weekReset = self.totalizeWeek(val)
|
||||
payload["values"]["month_volume"], monthReset = self.totalizeMonth(val)
|
||||
#payload["values"]["year_volume"], yearReset = self.totalizeYear(val)
|
||||
else:
|
||||
if chan.mesh_name == "scaled_flow_meter":
|
||||
if val > 0:
|
||||
self.flowing = True
|
||||
else:
|
||||
self.flowing = False
|
||||
|
||||
payload["values"][chan.mesh_name] = val
|
||||
#time.sleep(TAG_DATAERROR_SLEEPTIME) # sleep to allow Micro800 to handle ENET requests
|
||||
except Exception as e:
|
||||
log.error("Something went wrong in read: {}".format(e))
|
||||
self.check_totals_reset(self.flowing,self.totalizing)
|
||||
# print("plcfreshwater driver still alive...")
|
||||
try:
|
||||
plc_ping = os.system("ping -c 1 " + IP_TABLE[self.mac] + " > /dev/null 2>&1")
|
||||
except Exception as e:
|
||||
log.error("something went wrong in ping: {}".format(e))
|
||||
if plc_ping == 0:
|
||||
if not self.plc_ping_status == "OK":
|
||||
payload["values"]["plc_ping"] = "OK"
|
||||
self.plc_ping_status = "OK"
|
||||
else:
|
||||
if not self.plc_ping_status == "Comms Error to PLC":
|
||||
payload["values"]["plc_ping"] = "Comms Error to PLC"
|
||||
self.plc_ping_status = 'Comms Error to PLC'
|
||||
|
||||
self.sendToTB(json.dumps(payload))
|
||||
self.sendToTBAttributes(json.dumps({"latestReportTime": round(time.time()/600)*600*1000}))
|
||||
if dayReset:
|
||||
resetPayload["values"]["yesterday_volume"] = payload["values"]["day_volume"]
|
||||
resetPayload["values"]["day_volume"] = 0
|
||||
if weekReset:
|
||||
resetPayload["values"]["last_week_volume"] = payload["values"]["week_volume"]
|
||||
resetPayload["values"]["week_volume"] = 0
|
||||
if monthReset:
|
||||
resetPayload["values"]["last_month_volume"] = payload["values"]["month_volume"]
|
||||
resetPayload["values"]["month_volume"] = 0
|
||||
if yearReset:
|
||||
resetPayload["values"]["last_year_volume"] = payload["values"]["year_volume"]
|
||||
resetPayload["values"]["year_volume"] = 0
|
||||
|
||||
if resetPayload["values"]:
|
||||
resetPayload["ts"] = 1 + round(time.time()/600)*600*1000
|
||||
self.sendToTB(json.dumps(resetPayload))
|
||||
|
||||
time.sleep(10)
|
||||
if self.force_send:
|
||||
if send_loops > 2:
|
||||
log.warning("Turning off force_send")
|
||||
self.force_send = False
|
||||
send_loops = 0
|
||||
else:
|
||||
send_loops += 1
|
||||
|
||||
|
||||
if (now - self.public_ip_address_last_checked) > IP_CHECK_PERIOD:
|
||||
self._check_ip_address()
|
||||
|
||||
|
||||
def _check_ip_address(self):
|
||||
"""Check the public IP address and send to Meshify if changed."""
|
||||
self.public_ip_address_last_checked = time.time()
|
||||
test_public_ip = get_public_ip_address()
|
||||
test_public_ip = test_public_ip
|
||||
test_private_ip = get_private_ip_address()
|
||||
if not test_public_ip == self.public_ip_address and not test_public_ip == "0.0.0.0":
|
||||
self.sendtodbDev(1, 'public_ip_address', test_public_ip, 0, 'plcfreshwater')
|
||||
self.public_ip_address = test_public_ip
|
||||
if not test_private_ip == self.private_ip_address:
|
||||
self.sendtodbDev(1, 'private_ip_address', test_private_ip, 0, 'plcfreshwater')
|
||||
self.private_ip_address = test_private_ip
|
||||
hostname = "8.8.8.8"
|
||||
response = 1
|
||||
try:
|
||||
response = os.system("ping -c 1 " + hostname + " > /dev/null 2>&1")
|
||||
except Exception as e:
|
||||
print("Something went wrong in ping: {}".format(e))
|
||||
|
||||
#and then check the response...
|
||||
if response == 0:
|
||||
print(hostname, 'is up!')
|
||||
self.ping_counter = 0
|
||||
else:
|
||||
print(hostname, 'is down!')
|
||||
self.ping_counter += 1
|
||||
|
||||
if self.ping_counter >= 3:
|
||||
print("Rebooting because no internet detected")
|
||||
os.system('reboot')
|
||||
|
||||
def check_totals_reset(self, flowing, totalizing):
|
||||
if flowing and not totalizing:
|
||||
self.totals_counter = self.totals_counter + 1
|
||||
else:
|
||||
self.totals_counter = 0
|
||||
if self.totals_counter >= 3:
|
||||
self.fix_totals()
|
||||
log.info("Would've run fix_totals!!!!")
|
||||
self.totals_counter = 0
|
||||
|
||||
def fix_totals(self):
|
||||
Daily_Holding = PERSIST["Daily Holding"] - PERSIST["Monthly Holding"]
|
||||
new_lifetime = PERSIST["Lifetime"] - PERSIST["Monthly Holding"]
|
||||
resp = write_tag(self.plcip, "Lifetime_Flow_Meter_Gal", new_lifetime, plc_type="Micro800")
|
||||
if resp == True:
|
||||
PERSIST["Daily Holding"] = Daily_Holding
|
||||
PERSIST["Monthly Holding"] = 0.0
|
||||
PERSIST["Lifetime"] = new_lifetime
|
||||
persistence.store(PERSIST, 'totalizers_{}.json'.format(self.mac))
|
||||
log.info("RESETTING TOTALIZERS!!!")
|
||||
|
||||
|
||||
def plcfreshwater_sync(self, name, value):
|
||||
"""Sync all data from the driver."""
|
||||
self.force_send = True
|
||||
# self.sendtodb("log", "synced", 0)
|
||||
return True
|
||||
|
||||
def plcfreshwater_writeplctag(self, name, value):
|
||||
"""Write a value to the PLC."""
|
||||
from Channel import write_tag
|
||||
new_val = json.loads(str(value).replace("'", '"'))
|
||||
tag_n = str(new_val['tag']) # "cmd_Start"
|
||||
val_n = new_val['val']
|
||||
write_res = write_tag(str(PLC_IP_ADDRESS), tag_n, val_n, plc_type="Micro800")
|
||||
print("Result of plcfreshwater_writeplctag(self, {}, {}) = {}".format(name, value, write_res))
|
||||
if write_res is None:
|
||||
write_res = "Error writing to PLC..."
|
||||
return write_res
|
||||
|
||||
def convertPersist(original_json):
|
||||
# Mapping of keys
|
||||
key_mapping = {
|
||||
'Todays': None,
|
||||
'Yesterdays': None,
|
||||
'Current Months': None,
|
||||
'Previous Months': None,
|
||||
'Monthly Holding': 'monthHolding',
|
||||
'Daily Holding': 'dayHolding',
|
||||
'Lifetime': 'lifetime',
|
||||
'Day': 'day',
|
||||
'Month': 'month',
|
||||
'Last Report': None,
|
||||
'Totalizers': 'Totalizers'
|
||||
}
|
||||
now = round(time.time()/600)*600*1000
|
||||
# Convert keys and build new JSON object
|
||||
new_json = {}
|
||||
for original_key, new_key in key_mapping.items():
|
||||
if new_key:
|
||||
new_json[new_key] = original_json[original_key]
|
||||
new_json["year"] = time.gmtime(now/1000.0).tm_year
|
||||
# Convert to JSON format
|
||||
#new_json_str = json.dumps(new_json, indent=4, sort_keys=True)
|
||||
return(new_json)
|
||||
|
||||
def saveTotalizers(self, totalizers):
|
||||
try:
|
||||
with open("/root/python_firmware/totalizers.json", "w") as t:
|
||||
json.dump(totalizers,t)
|
||||
except Exception as e:
|
||||
log.error(e)
|
||||
|
||||
def get_totalizers(self):
|
||||
saveFile = "/root/python_firmware/totalizers.json"
|
||||
# Check if the state file exists.
|
||||
if not os.path.exists(saveFile):
|
||||
return {
|
||||
"day": 0,
|
||||
"week": 0,
|
||||
"month": 0,
|
||||
"year": 0,
|
||||
"lifetime": 0,
|
||||
"dayHolding": 0,
|
||||
"weekHolding": 0,
|
||||
"monthHolding": 0,
|
||||
"yearHolding": 0
|
||||
}
|
||||
try:
|
||||
with open("/root/python_firmware/totalizers.json", "r") as t:
|
||||
totalizers = json.load(t)
|
||||
if not totalizers:
|
||||
log.info("-----INITIALIZING TOTALIZERS-----")
|
||||
totalizers = {
|
||||
"day": 0,
|
||||
"week": 0,
|
||||
"month": 0,
|
||||
"year": 0,
|
||||
"lifetime": 0,
|
||||
"dayHolding": 0,
|
||||
"weekHolding": 0,
|
||||
"monthHolding": 0,
|
||||
"yearHolding": 0
|
||||
}
|
||||
except:
|
||||
totalizers = {
|
||||
"day": 0,
|
||||
"week": 0,
|
||||
"month": 0,
|
||||
"year": 0,
|
||||
"lifetime": 0,
|
||||
"dayHolding": 0,
|
||||
"weekHolding": 0,
|
||||
"monthHolding": 0,
|
||||
"yearHolding": 0
|
||||
}
|
||||
return totalizers
|
||||
|
||||
def totalizeDay(self,lifetime):
|
||||
totalizers = self.get_totalizers()
|
||||
if "Daily Holding" in totalizers.keys():
|
||||
totalizers = self.convertPersist(totalizers)
|
||||
now = round(time.time()/600)*600*1000
|
||||
reset = False
|
||||
value = lifetime - totalizers["dayHolding"]
|
||||
if not int(time.gmtime(now/1000.0).tm_day) == int(totalizers["day"]):
|
||||
totalizers["dayHolding"] = lifetime
|
||||
totalizers["day"] = int(now.strftime("%d"))
|
||||
self.saveTotalizers(totalizers)
|
||||
reset = True
|
||||
return (value,reset)
|
||||
|
||||
def totalizeWeek(self,lifetime):
|
||||
totalizers = self.get_totalizers()
|
||||
now = round(time.time()/600)*600*1000
|
||||
reset = False
|
||||
value = lifetime - totalizers["weekHolding"]
|
||||
if (not now.strftime("%U") == totalizers["week"] and now.strftime("%a") == "Sun") or totalizers["week"] == 0:
|
||||
totalizers["weekHolding"] = lifetime
|
||||
totalizers["week"] = now.strftime("%U")
|
||||
self.saveTotalizers(totalizers)
|
||||
reset = True
|
||||
return (value, reset)
|
||||
|
||||
def totalizeMonth(self,lifetime):
|
||||
totalizers = self.get_totalizers()
|
||||
if "Month Holding" in totalizers.keys():
|
||||
totalizers = self.convertPersist(totalizers)
|
||||
now = round(time.time()/600)*600*1000
|
||||
reset = False
|
||||
value = lifetime - totalizers["monthHolding"]
|
||||
if not int(time.gmtime(now/1000.0).tm_mon) == int(totalizers["month"]):
|
||||
totalizers["monthHolding"] = lifetime
|
||||
totalizers["month"] = now.strftime("%m")
|
||||
self.saveTotalizers(totalizers)
|
||||
reset = True
|
||||
return (value,reset)
|
||||
|
||||
def totalizeYear(self,lifetime):
|
||||
totalizers = self.get_totalizers()
|
||||
now = round(time.time()/600)*600*1000
|
||||
reset = False
|
||||
value = lifetime - totalizers["yearHolding"]
|
||||
if not int(time.gmtime(now/1000.0).tm_year) == int(totalizers["year"]):
|
||||
totalizers["yearHolding"] = lifetime
|
||||
totalizers["year"] = now.strftime("%Y")
|
||||
self.saveTotalizers(totalizers)
|
||||
reset = True
|
||||
return (value, reset)
|
||||
|
||||
def totalize(self, val):
|
||||
right_now = dt.today()
|
||||
month = right_now.month
|
||||
day = right_now.day
|
||||
#Totalize Today, Yesterday, Month, Last Month
|
||||
#if the stored day is 0 then it's a fresh run of this should initalize values now
|
||||
if PERSIST['Day'] == 0:
|
||||
PERSIST['Day'] = day
|
||||
PERSIST['Month'] = month
|
||||
PERSIST['Daily Holding'] = val
|
||||
PERSIST['Monthly Holding'] = val
|
||||
persistence.store(PERSIST, 'totalizers_{}.json'.format(self.mac))
|
||||
#Communication error during initialization check if lifetime has reported properly and update holdings
|
||||
if PERSIST['Daily Holding'] == None and not(val == None):
|
||||
PERSIST['Daily Holding'] = val
|
||||
PERSIST['Monthly Holding'] = val
|
||||
|
||||
try:
|
||||
if val - PERSIST["Lifetime"] > 0:
|
||||
self.totalizing = True
|
||||
else:
|
||||
self.totalizing = False
|
||||
except:
|
||||
log.error("Error while checking for totalizing")
|
||||
|
||||
try:
|
||||
if val - PERSIST['Daily Holding'] - PERSIST['Todays'] > 500 or time.time() - PERSIST['Last Report'] > 3600 or self.force_send:
|
||||
PERSIST['Todays'] = val - PERSIST['Daily Holding']
|
||||
PERSIST['Current Months'] = val - PERSIST['Monthly Holding']
|
||||
PERSIST['Lifetime'] = val
|
||||
self.sendtodbDev(1, 'total_fm_day_gal', PERSIST['Todays'], 0, 'plcfreshwater')
|
||||
self.sendtodbDev(1, 'total_fm_day_bbls', PERSIST['Todays']/42, 0, 'plcfreshwater')
|
||||
self.sendtodbDev(1, 'total_fm_month_gal', PERSIST['Current Months'], 0, 'plcfreshwater')
|
||||
self.sendtodbDev(1, 'total_fm_month_bbls', PERSIST['Current Months']/42, 0, 'plcfreshwater')
|
||||
self.sendtodbDev(1, 'total_fm_yesterday_gal', PERSIST['Yesterdays'], 0, 'plcfreshwater')
|
||||
self.sendtodbDev(1, 'total_fm_yesterday_bbls', PERSIST['Yesterdays']/42, 0, 'plcfreshwater')
|
||||
self.sendtodbDev(1, 'lifetime_flow_meter_gal', PERSIST['Lifetime'], 0, 'plcfreshwater')
|
||||
self.sendtodbDev(1, 'lifetime_flow_meter_bbls', PERSIST['Lifetime']/42, 0, 'plcfreshwater')
|
||||
if self.force_send:
|
||||
self.sendtodbDev(1, 'total_fm_last_month_gal', PERSIST['Previous Months'], 0, 'plcfreshwater')
|
||||
self.sendtodbDev(1, 'total_fm_last_month_bbls', PERSIST['Previous Months']/42, 0, 'plcfreshwater')
|
||||
PERSIST['Last Report'] = time.time()
|
||||
except:
|
||||
if time.time() - PERSIST['Last Report'] > 3600 or self.force_send:
|
||||
self.sendtodbDev(1, 'total_fm_day_gal', PERSIST['Todays'], 0, 'plcfreshwater')
|
||||
self.sendtodbDev(1, 'total_fm_day_bbls', PERSIST['Todays']/42, 0, 'plcfreshwater')
|
||||
self.sendtodbDev(1, 'total_fm_month_gal', PERSIST['Current Months'], 0, 'plcfreshwater')
|
||||
self.sendtodbDev(1, 'total_fm_month_bbls', PERSIST['Current Months']/42, 0, 'plcfreshwater')
|
||||
self.sendtodbDev(1, 'total_fm_yesterday_gal', PERSIST['Yesterdays'], 0, 'plcfreshwater')
|
||||
self.sendtodbDev(1, 'total_fm_yesterday_bbls', PERSIST['Yesterdays']/42, 0, 'plcfreshwater')
|
||||
self.sendtodbDev(1, 'lifetime_flow_meter_gal', PERSIST['Lifetime'], 0, 'plcfreshwater')
|
||||
self.sendtodbDev(1, 'lifetime_flow_meter_bbls', PERSIST['Lifetime']/42, 0, 'plcfreshwater')
|
||||
if self.force_send:
|
||||
self.sendtodbDev(1, 'total_fm_last_month_gal', PERSIST['Previous Months'], 0, 'plcfreshwater')
|
||||
self.sendtodbDev(1, 'total_fm_last_month_bbls', PERSIST['Previous Months']/42, 0, 'plcfreshwater')
|
||||
PERSIST['Last Report'] = time.time()
|
||||
|
||||
#If the current day doesn't equal the stored day roll the dailies over
|
||||
if not(day == PERSIST['Day']):
|
||||
#if a comms error use the stored values else use the latested values
|
||||
if val == None:
|
||||
PERSIST['Yesterdays'] = PERSIST['Todays']
|
||||
PERSIST['Todays'] = 0
|
||||
PERSIST['Daily Holding'] = PERSIST['Lifetime']
|
||||
else:
|
||||
PERSIST['Yesterdays'] = val - PERSIST['Daily Holding']
|
||||
PERSIST['Todays'] = 0
|
||||
PERSIST['Daily Holding'] = val
|
||||
PERSIST['Lifetime'] = val
|
||||
PERSIST['Day'] = day
|
||||
self.sendtodbDev(1, 'total_fm_day_gal', PERSIST['Todays'], 0, 'plcfreshwater')
|
||||
self.sendtodbDev(1, 'total_fm_day_bbls', PERSIST['Todays']/42, 0, 'plcfreshwater')
|
||||
self.sendtodbDev(1, 'total_fm_yesterday_gal', PERSIST['Yesterdays'], 0, 'plcfreshwater')
|
||||
self.sendtodbDev(1, 'total_fm_yesterday_bbls', PERSIST['Yesterdays']/42, 0, 'plcfreshwater')
|
||||
self.sendtodbDev(1, 'lifetime_flow_meter_gal', PERSIST['Lifetime'], 0, 'plcfreshwater')
|
||||
self.sendtodbDev(1, 'lifetime_flow_meter_bbls', PERSIST['Lifetime']/42, 0, 'plcfreshwater')
|
||||
PERSIST['Last Report'] = time.time()
|
||||
#the day has rolled over if the month also rolls over
|
||||
if not(month == PERSIST['Month']):
|
||||
#if a comms error use the stored values else use the latested values
|
||||
if val == None:
|
||||
PERSIST['Previous Months'] = PERSIST['Current Months']
|
||||
PERSIST['Current Months'] = 0
|
||||
PERSIST['Monthly Holding'] = PERSIST['Lifetime']
|
||||
else:
|
||||
PERSIST['Previous Months'] = val - PERSIST['Monthly Holding']
|
||||
PERSIST['Current Months'] = 0
|
||||
PERSIST['Monthly Holding'] = val
|
||||
PERSIST['Month'] = month
|
||||
self.sendtodbDev(1, 'total_fm_month_gal', PERSIST['Current Months'], 0, 'plcfreshwater')
|
||||
self.sendtodbDev(1, 'total_fm_month_bbls', PERSIST['Current Months']/42, 0, 'plcfreshwater')
|
||||
self.sendtodbDev(1, 'total_fm_last_month_gal', PERSIST['Previous Months'], 0, 'plcfreshwater')
|
||||
self.sendtodbDev(1, 'total_fm_last_month_bbls', PERSIST['Previous Months']/42, 0, 'plcfreshwater')
|
||||
self.sendtodbDev(1, 'lifetime_flow_meter_gal', PERSIST['Lifetime'], 0, 'plcfreshwater')
|
||||
self.sendtodbDev(1, 'lifetime_flow_meter_bbls', PERSIST['Lifetime']/42, 0, 'plcfreshwater')
|
||||
PERSIST['Last Report'] = time.time()
|
||||
persistence.store(PERSIST, 'totalizers_{}.json'.format(self.mac))
|
||||
@@ -0,0 +1,55 @@
|
||||
from pycomm.ab_comm.clx import Driver as ClxDriver
|
||||
from pycomm.cip.cip_base import CommError, DataError
|
||||
import time
|
||||
import sys
|
||||
TAG_DATAERROR_SLEEPTIME = 5
|
||||
|
||||
def read_tag(addr, tag, plc_type="CLX"):
    """Read a single tag from the PLC at *addr*.

    Returns the driver's read result on success, False on any failure.
    A Micro800 target needs a direct (unrouted) connection.
    """
    use_direct = (plc_type == "Micro800")
    driver = ClxDriver()
    try:
        if driver.open(addr, direct_connection=use_direct):
            try:
                result = driver.read_tag(tag)
                driver.close()
                return result
            except DataError as err:
                driver.close()
                # Back off before the caller retries.
                time.sleep(TAG_DATAERROR_SLEEPTIME)
                print("Data Error during readTag({}, {}): {}".format(addr, tag, err))
    except CommError:
        # err = c.get_status()
        #clx.close()
        print("Could not connect during readTag({}, {})".format(addr, tag))
    except AttributeError as err:
        driver.close()
        print("AttributeError during readTag({}, {}): \n{}".format(addr, tag, err))
    driver.close()
    return False
|
||||
|
||||
|
||||
def write_tag(addr, tag, val, plc_type="CLX"):
    """Write *val* to *tag* on the PLC at *addr*.

    The tag is read first so the write can reuse its reported data type
    (``initial_val[1]``). Returns the driver's write status on success,
    False on any failure.
    """
    direct = plc_type == "Micro800"
    clx = ClxDriver()
    try:
        if clx.open(addr, direct_connection=direct):
            try:
                # Read first to learn the tag's data type for the write call.
                initial_val = clx.read_tag(tag)
                write_status = clx.write_tag(tag, val, initial_val[1])
                clx.close()
                return write_status
            except DataError as err:
                clx_err = clx.get_status()
                clx.close()
                print("--\nDataError during writeTag({}, {}, {}, plc_type={}) -- {}\n{}\n".format(addr, tag, val, plc_type, err, clx_err))
    except CommError as err:
        print("--\nCommError during write_tag({}, {}, {}, plc_type={})\n{}\n--".format(addr, tag, val, plc_type, err))
        # Fix: this close was commented out, leaking the session/socket on
        # every connection failure.
        clx.close()
    return False
|
||||
|
||||
# Command-line entry point: `python <script> <plc_ip> <new_lifetime_gal>`.
# Writes the lifetime flow-meter total to a Micro800 PLC, then reads it
# back to confirm — but only when the write reported success.
resp = write_tag(sys.argv[1], "Lifetime_Flow_Meter_Gal", float(sys.argv[2]), "Micro800")
if resp:
    print(read_tag(sys.argv[1], "Lifetime_Flow_Meter_Gal", "Micro800"))
|
||||
64
meshifyDrivers/plcfreshwater/utilities.py
Normal file
64
meshifyDrivers/plcfreshwater/utilities.py
Normal file
@@ -0,0 +1,64 @@
|
||||
"""Utility functions for the driver."""
|
||||
import socket
|
||||
import struct
|
||||
import urllib
|
||||
import contextlib
|
||||
|
||||
def get_private_ip_address():
    """Find the private IP Address of the host device.

    A UDP connect() to 8.8.8.8 sends no traffic; it only makes the OS pick
    the outbound interface, whose address getsockname() then reports.
    On failure the exception object itself is returned (callers appear to
    treat the result as text) — preserved for backward compatibility.
    """
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    try:
        sock.connect(("8.8.8.8", 80))
    except Exception as e:
        sock.close()  # fix: the socket was previously leaked on failure
        return e
    ip_address = sock.getsockname()[0]
    sock.close()
    return ip_address
|
||||
|
||||
def get_public_ip_address():
    """Return the public IP address as reported by checkip.amazonaws.com.

    Returns the fallback "0.0.0.0" unchanged when the HTTP request fails.
    NOTE: relies on Python 2's urllib.urlopen.
    """
    ip_address = "0.0.0.0"
    try:
        with contextlib.closing(urllib.urlopen("http://checkip.amazonaws.com")) as url:
            ip_address = url.read()
    except Exception as e:
        print("could not resolve check IP: {}".format(e))
        return ip_address
    # The service responds with the address plus a trailing newline.
    return ip_address[:-1]
|
||||
|
||||
|
||||
|
||||
def int_to_float16(int_to_convert):
    """Decode a 16-bit integer as an IEEE-754 half-precision float.

    Bit layout: 1 sign bit, 5 exponent bits, 10 fraction bits.
    """
    bits = ('0' * 16 + '{0:b}'.format(int_to_convert))[-16:]
    sign = -1.0 if bits[0] == '1' else 1.0
    exponent = float(int(bits[1:6], 2))
    fraction = float(int(bits[6:17], 2))

    if exponent == float(0b00000):
        # Subnormal: no implicit leading 1, fixed exponent of -14.
        return sign * 2 ** -14 * fraction / (2.0 ** 10.0)
    if exponent == float(0b11111):
        # All-ones exponent: infinity when the fraction is zero, else NaN.
        return sign * float("inf") if fraction == 0 else float("NaN")
    # Normal number: implicit leading 1 and a biased (-15) exponent.
    mantissa = 1.0 + fraction / (2.0 ** 10.0)
    return sign * (2 ** (exponent - 15)) * mantissa
|
||||
|
||||
|
||||
def ints_to_float(int1, int2):
    """Combine two 16-bit registers (high word first) into a 32-bit float."""
    packed = struct.pack('>HH', int1, int2)
    value = struct.unpack('>f', packed)[0]
    print("[{}, {}] >> {}".format(int1, int2, value))
    return value
|
||||
|
||||
|
||||
def degf_to_degc(temp_f):
    """Return *temp_f* (degrees Fahrenheit) expressed in degrees Celsius."""
    offset = temp_f - 32.0
    return offset * (5.0 / 9.0)
|
||||
|
||||
|
||||
def degc_to_degf(temp_c):
    """Return *temp_c* (degrees Celsius) expressed in degrees Fahrenheit."""
    scaled = temp_c * 1.8
    return scaled + 32.0
|
||||
291
meshifyDrivers/transferlite/Channel.py
Normal file
291
meshifyDrivers/transferlite/Channel.py
Normal file
@@ -0,0 +1,291 @@
|
||||
"""Define Meshify channel class."""
|
||||
from pycomm.ab_comm.clx import Driver as ClxDriver
|
||||
from pycomm.cip.cip_base import CommError, DataError
|
||||
import time
|
||||
|
||||
|
||||
def binarray(intval):
    """Split an integer into its bits.

    Returns the bits as one-character strings, least-significant bit first,
    zero-padded to at least 8 positions.
    """
    bits = list('{0:08b}'.format(intval))
    bits.reverse()
    return bits
|
||||
|
||||
|
||||
def read_tag(addr, tag, plc_type="CLX"):
    """Read a single tag from the PLC at *addr*.

    Returns the driver's read result on success, False on any failure.
    A Micro800 target needs a direct (unrouted) connection.
    """
    direct = plc_type == "Micro800"
    c = ClxDriver()
    try:
        if c.open(addr, direct_connection=direct):
            try:
                v = c.read_tag(tag)
                c.close()  # fix: the session was previously leaked on success
                return v
            except DataError as e:
                c.close()
                print("Data Error during readTag({}, {}): {}".format(addr, tag, e))
    except CommError:
        # err = c.get_status()
        c.close()
        print("Could not connect during readTag({}, {})".format(addr, tag))
        # print err
    except AttributeError as e:
        c.close()
        print("AttributeError during readTag({}, {}): \n{}".format(addr, tag, e))
    c.close()
    return False
|
||||
|
||||
|
||||
def read_array(addr, tag, start, end, plc_type="CLX"):
    """Read elements ``tag[start] .. tag[end-1]`` from the PLC at *addr*.

    Returns a list of element values rounded to 4 decimal places, or False
    when the connection cannot be opened, the range is empty, or a read fails.
    """
    direct = plc_type == "Micro800"
    c = ClxDriver()
    if not c.open(addr, direct_connection=direct):
        # fix: previously fell off the end and returned None here.
        return False
    arr_vals = []
    try:
        for i in range(start, end):
            tag_w_index = tag + "[{}]".format(i)
            v = c.read_tag(tag_w_index)
            # print('{} - {}'.format(tag_w_index, v))
            arr_vals.append(round(v[0], 4))
        # print(v)
        if arr_vals:
            c.close()  # fix: the session was previously leaked on success
            return arr_vals
        print("No length for {}".format(addr))
        c.close()
        return False
    except Exception:
        print("Error during readArray({}, {}, {}, {})".format(addr, tag, start, end))
        err = c.get_status()
        c.close()
        print(err)
    return False
|
||||
|
||||
|
||||
def write_tag(addr, tag, val, plc_type="CLX"):
    """Write *val* to *tag* on the PLC at *addr*.

    The tag is read first so the write can reuse its reported data type
    (``cv[1]``). Returns the driver's write status on success; failure
    paths return None (callers test ``w is None``), so that contract is
    deliberately preserved.
    """
    direct = plc_type == "Micro800"
    c = ClxDriver()
    try:
        if c.open(addr, direct_connection=direct):
            try:
                cv = c.read_tag(tag)
                print(cv)
                wt = c.write_tag(tag, val, cv[1])
                c.close()  # fix: the session was previously leaked on success
                return wt
            except Exception:
                print("Error during writeTag({}, {}, {})".format(addr, tag, val))
                err = c.get_status()
                c.close()
                print(err)
            c.close()
    except CommError:
        # err = c.get_status()
        c.close()
        print("Could not connect during writeTag({}, {}, {})".format(addr, tag, val))
|
||||
|
||||
|
||||
class Channel(object):
    """Holds the configuration for a Meshify channel.

    Tracks the last reported value and decides, via check(), whether a
    freshly read value is different enough (or old enough) to re-send.
    """

    def __init__(self, mesh_name, data_type, chg_threshold, guarantee_sec, map_=False, write_enabled=False):
        """Initialize the channel.

        mesh_name     -- name the value is published under
        data_type     -- 'BOOL'/'STRING' use equality compare; anything else
                         is treated as numeric and uses chg_threshold
        chg_threshold -- minimum absolute change that triggers a send
        guarantee_sec -- maximum seconds between sends even without change
        map_          -- optional dict translating raw values to report values
        write_enabled -- whether writes to this channel are permitted
        """
        self.mesh_name = mesh_name
        self.data_type = data_type
        # last_value holds the previously reported value; value the current one.
        self.last_value = None
        self.value = None
        # 0 means "never sent" and forces the first send in check().
        self.last_send_time = 0
        self.chg_threshold = chg_threshold
        self.guarantee_sec = guarantee_sec
        self.map_ = map_
        self.write_enabled = write_enabled

    def __str__(self):
        """Create a string for the channel."""
        return "{}\nvalue: {}, last_send_time: {}".format(self.mesh_name, self.value, self.last_send_time)

    def check(self, new_value, force_send=False):
        """Check to see if the new_value needs to be stored.

        Updates self.value/self.last_value/self.last_send_time when a send
        is needed and returns True; otherwise returns False. new_value is
        translated through map_ (when set) before being stored.
        """
        send_needed = False
        send_reason = ""
        if self.data_type == 'BOOL' or self.data_type == 'STRING':
            # Discrete values: any difference counts as a change.
            if self.last_send_time == 0:
                send_needed = True
                send_reason = "no send time"
            elif self.value is None:
                send_needed = True
                send_reason = "no value"
            elif not (self.value == new_value):
                if self.map_:
                    # Stored value is already mapped, so compare against the
                    # mapped form of the new raw value.
                    if not self.value == self.map_[new_value]:
                        send_needed = True
                        send_reason = "value change"
                else:
                    send_needed = True
                    send_reason = "value change"
            elif (time.time() - self.last_send_time) > self.guarantee_sec:
                send_needed = True
                send_reason = "guarantee sec"
            elif force_send:
                send_needed = True
                send_reason = "forced"
        else:
            # Numeric values: only a change beyond chg_threshold counts.
            if self.last_send_time == 0:
                send_needed = True
                send_reason = "no send time"
            elif self.value is None:
                send_needed = True
                send_reason = "no value"
            elif abs(self.value - new_value) > self.chg_threshold:
                send_needed = True
                send_reason = "change threshold"
            elif (time.time() - self.last_send_time) > self.guarantee_sec:
                send_needed = True
                send_reason = "guarantee sec"
            elif force_send:
                send_needed = True
                send_reason = "forced"
        if send_needed:
            # Shift the current value into history before overwriting it.
            self.last_value = self.value
            if self.map_:
                try:
                    self.value = self.map_[new_value]
                except KeyError:
                    # Unmapped raw value: report it as-is rather than dropping it.
                    print("Cannot find a map value for {} in {} for {}".format(new_value, self.map_, self.mesh_name))
                    self.value = new_value
            else:
                self.value = new_value
            self.last_send_time = time.time()
            print("Sending {} for {} - {}".format(self.value, self.mesh_name, send_reason))
        return send_needed

    def read(self):
        """Read the value. Subclasses override this with a real data source."""
        pass
|
||||
|
||||
|
||||
def identity(sent):
    """Return exactly what was sent to it."""
    # Default transform for ModbusChannel: leaves the raw value unchanged.
    return sent
|
||||
|
||||
|
||||
class ModbusChannel(Channel):
    """Modbus channel object."""

    def __init__(self, mesh_name, register_number, data_type, chg_threshold, guarantee_sec, channel_size=1, map_=False, write_enabled=False, transformFn=identity):
        """Initialize the channel.

        register_number -- Modbus register the value comes from
        channel_size    -- number of registers the value occupies
        transformFn     -- callable applied to the raw register value in read()
        Remaining parameters are handled by Channel.__init__.
        """
        super(ModbusChannel, self).__init__(mesh_name, data_type, chg_threshold, guarantee_sec, map_, write_enabled)
        # Fix: the base-class attributes were all redundantly reassigned here
        # after super().__init__ already set them; only the Modbus-specific
        # attributes need to be added.
        self.register_number = register_number
        self.channel_size = channel_size
        self.transformFn = transformFn

    def read(self, mbsvalue):
        """Return the transformed read value for the raw register value."""
        return self.transformFn(mbsvalue)
|
||||
|
||||
|
||||
class PLCChannel(Channel):
    """PLC Channel Object."""

    def __init__(self, ip, mesh_name, plc_tag, data_type, chg_threshold, guarantee_sec, map_=False, write_enabled=False, plc_type='CLX'):
        """Initialize the channel.

        ip       -- PLC IP address the tag is read from
        plc_tag  -- tag name on the PLC
        plc_type -- 'CLX' (routed) or 'Micro800' (direct connection)
        Remaining parameters are handled by Channel.__init__.
        """
        super(PLCChannel, self).__init__(mesh_name, data_type, chg_threshold, guarantee_sec, map_, write_enabled)
        # Fix: the base-class attributes were all redundantly reassigned here
        # after super().__init__ already set them; only the PLC-specific
        # attributes need to be added.
        self.plc_ip = ip
        self.plc_tag = plc_tag
        self.plc_type = plc_type

    def read(self):
        """Read the tag from the PLC; return its value, or None on failure."""
        plc_value = None
        if self.plc_tag and self.plc_ip:
            read_value = read_tag(self.plc_ip, self.plc_tag, plc_type=self.plc_type)
            if read_value:
                # read_tag returns a (value, type) pair; keep only the value.
                plc_value = read_value[0]
        return plc_value
|
||||
|
||||
|
||||
class BoolArrayChannels(Channel):
    """Hold the configuration for a set of boolean array channels.

    One PLC integer tag is split into bits; map_ assigns a channel name to
    each bit index, and read() reports the whole named-bit dict at once.
    """

    def __init__(self, ip, mesh_name, plc_tag, data_type, chg_threshold, guarantee_sec, map_=False, write_enabled=False):
        """Initialize the channel.

        map_ -- dict of {bit_index: channel_name} selecting which bits of
                the tag value are reported and under what names.
        """
        # NOTE(review): Channel.__init__ is not called here; the attributes
        # are set directly instead (same fields, minus plc-specific ones).
        self.plc_ip = ip
        self.mesh_name = mesh_name
        self.plc_tag = plc_tag
        self.data_type = data_type
        self.last_value = None
        self.value = None
        # 0 means "never sent" and forces the first send in read().
        self.last_send_time = 0
        self.chg_threshold = chg_threshold
        self.guarantee_sec = guarantee_sec
        self.map_ = map_
        self.write_enabled = write_enabled

    def compare_values(self, new_val_dict):
        """Compare new values to old values to see if the values need storing.

        Returns True when any named bit differs from last_value, or when a
        key is missing from last_value (treated as a change).
        """
        send = False
        for idx in new_val_dict:
            try:
                if new_val_dict[idx] != self.last_value[idx]:
                    send = True
            except KeyError:
                print("Key Error in self.compare_values for index {}".format(idx))
                send = True
        return send

    def read(self, force_send=False):
        """Read the value and check to see if needs to be stored.

        Reads the tag, expands it into named bits via map_, and applies the
        same send rules as Channel.check. Returns True when a send is needed.
        """
        send_needed = False
        send_reason = ""
        if self.plc_tag:
            v = read_tag(self.plc_ip, self.plc_tag)
            if v:
                # v[0] is the raw integer; binarray yields its bits LSB-first.
                bool_arr = binarray(v[0])
                new_val = {}
                for idx in self.map_:
                    try:
                        # NOTE(review): a bit index past len(bool_arr) would
                        # raise IndexError, which this KeyError catch does
                        # not cover — confirm map_ indexes stay in range.
                        new_val[self.map_[idx]] = bool_arr[idx]
                    except KeyError:
                        print("Not able to get value for index {}".format(idx))

                if self.last_send_time == 0:
                    send_needed = True
                    send_reason = "no send time"
                elif self.value is None:
                    send_needed = True
                    send_reason = "no value"
                elif self.compare_values(new_val):
                    send_needed = True
                    send_reason = "value change"
                elif (time.time() - self.last_send_time) > self.guarantee_sec:
                    send_needed = True
                    send_reason = "guarantee sec"
                elif force_send:
                    send_needed = True
                    send_reason = "forced"

                if send_needed:
                    # Unlike Channel.check, last_value ends up equal to the
                    # newly stored value; the next read() then compares the
                    # fresh reading against this snapshot.
                    self.value = new_val
                    self.last_value = self.value
                    self.last_send_time = time.time()
                    print("Sending {} for {} - {}".format(self.value, self.mesh_name, send_reason))
        return send_needed
|
||||
12
meshifyDrivers/transferlite/config.txt
Normal file
12
meshifyDrivers/transferlite/config.txt
Normal file
@@ -0,0 +1,12 @@
|
||||
{
|
||||
"files": {
|
||||
"file3": "channel.py",
|
||||
"file2": "utilities.py",
|
||||
"file1": "transferlite.py",
|
||||
"file4": "file_logger.py"
|
||||
},
|
||||
"deviceName": "transferlite",
|
||||
"driverId": "0230",
|
||||
"releaseVersion": "4",
|
||||
"driverFileName": "transferlite.py"
|
||||
}
|
||||
18
meshifyDrivers/transferlite/file_logger.py
Normal file
18
meshifyDrivers/transferlite/file_logger.py
Normal file
@@ -0,0 +1,18 @@
|
||||
"""Logging setup for transferlite."""
import logging
from logging.handlers import RotatingFileHandler
import sys

# Log line format: timestamp, level, function(line-number), message.
log_formatter = logging.Formatter('%(asctime)s %(levelname)s %(funcName)s(%(lineno)d) %(message)s')
log_file = './transferlite.log'
# Rotating file: at most 500 KiB per file, keeping 2 rolled-over backups.
my_handler = RotatingFileHandler(log_file, mode='a', maxBytes=500*1024,
                                 backupCount=2, encoding=None, delay=0)
my_handler.setFormatter(log_formatter)
my_handler.setLevel(logging.INFO)
filelogger = logging.getLogger('transferlite')
filelogger.setLevel(logging.INFO)
filelogger.addHandler(my_handler)

# Mirror the same formatted log output to stdout as well.
console_out = logging.StreamHandler(sys.stdout)
console_out.setFormatter(log_formatter)
filelogger.addHandler(console_out)
|
||||
21
meshifyDrivers/transferlite/persistence.py
Normal file
21
meshifyDrivers/transferlite/persistence.py
Normal file
@@ -0,0 +1,21 @@
|
||||
"""Data persistance functions."""
|
||||
# if more advanced persistence is needed, use a sqlite database
|
||||
import json
|
||||
|
||||
|
||||
def load(filename="persist.json"):
    """Load persisted settings from *filename*.

    Returns the decoded JSON object, or False when the file is missing
    or cannot be parsed.
    """
    try:
        with open(filename, 'r') as fh:
            return json.load(fh)
    except Exception:
        # Best-effort persistence: a missing/corrupt file simply yields False.
        return False
|
||||
|
||||
|
||||
def store(persist_obj, filename="persist.json"):
    """Store the persisting settings into *filename* as indented JSON.

    Returns json.dump's result (None) on success, False on any failure.
    """
    try:
        with open(filename, 'w') as fh:
            return json.dump(persist_obj, fh, indent=4)
    except Exception:
        # Best-effort persistence: any write/serialization error yields False.
        return False
|
||||
188
meshifyDrivers/transferlite/transferlite.py
Normal file
188
meshifyDrivers/transferlite/transferlite.py
Normal file
@@ -0,0 +1,188 @@
|
||||
"""Driver for transferlite"""
|
||||
|
||||
import threading
|
||||
import sys
|
||||
import json
|
||||
import time
|
||||
import logging
|
||||
from random import randint
|
||||
from device_base import deviceBase
|
||||
from channel import PLCChannel, read_tag, write_tag,TAG_DATAERROR_SLEEPTIME
|
||||
from utilities import get_public_ip_address
|
||||
from file_logger import filelogger as log
|
||||
|
||||
|
||||
_ = None  # placeholder name; intentionally unused
log.info("transferlite startup")

# Maps raw PLC 0/1 values to the strings reported to Meshify.
TRUE_FALSE = {
    0: "false",
    1: "true"
}

AUTO_MANUAL = {
    0: "Auto",
    1: "Manual"
}

# State-bit values reported by the PLC state tags.
# NOTE(review): looks PackML-like — confirm against the PLC program.
PHASE_STATES = {
    1: "Running",
    2: "Holding",
    4: "Restarting",
    8: "Stopping",
    16: "Aborting",
    32: "Resetting",
    64: "Idle",
    128: "Held",
    256: "Complete",
    512: "Stopped",
    1024: "Aborted"
}

# GLOBAL VARIABLES
WAIT_FOR_CONNECTION_SECONDS = 60  # startup delay so networking can come up
IP_CHECK_PERIOD = 60              # seconds between public-IP re-checks
WATCHDOG_ENABLE = True
WATCHDOG_CHECK_PERIOD = 60        # seconds between watchdog round-trips
WATCHDOG_SEND_PERIOD = 3600  # Seconds, the longest amount of time before sending the watchdog status
PLC_IP_ADDRESS = "192.168.1.10"
# Each PLCChannel: (ip, mesh name, PLC tag, type, change threshold, guarantee seconds).
CHANNELS = [
    PLCChannel(PLC_IP_ADDRESS, "ft01_flowmeter_bpd", "FT01_Flowmeter_BPD", "REAL", 100.0, 600),
    PLCChannel(PLC_IP_ADDRESS, "auto_manual", "sts_autoMode", "STRING", 1, 600, map_=AUTO_MANUAL),
    PLCChannel(PLC_IP_ADDRESS, "ft01_flowmeter_gpm", "FT01_Flowmeter.val", "REAL", 10.0, 600),
    PLCChannel(PLC_IP_ADDRESS, "lt11_pondlevel", "LT11_PondLevel.val", "REAL", 1.0, 600),
    PLCChannel(PLC_IP_ADDRESS, "lt21_pondlevel", "LT21_PondLevel.val", "REAL", 1.0, 600),
    PLCChannel(PLC_IP_ADDRESS, "system1_hasleveltransmitter", "cfg_System1.hasLevelTransmitter", "STRING", 1.0, 600, map_=TRUE_FALSE),
    PLCChannel(PLC_IP_ADDRESS, "system2_hasleveltransmitter", "cfg_System2.hasLevelTransmitter", "STRING", 1.0, 600, map_=TRUE_FALSE),
    PLCChannel(PLC_IP_ADDRESS, "pt11_dischargepressure", "PT11_DischargePressure.val", "REAL", 10, 600),
    PLCChannel(PLC_IP_ADDRESS, "pt21_dischargepressure", "PT21_DischargePressure.val", "REAL", 10, 600),
    PLCChannel(PLC_IP_ADDRESS, "flow_rate_setpoint", "set_FlowRateSetpoint", "REAL", 1.0, 600),
    PLCChannel(PLC_IP_ADDRESS, "system1_frequency_setpoint", "set_ManualFrequencySP_System1", "REAL", 1.0, 600),
    PLCChannel(PLC_IP_ADDRESS, "system2_frequency_setpoint", "set_ManualFrequencySP_System2", "REAL", 1.0, 600),
    PLCChannel(PLC_IP_ADDRESS, "ft01_flowmeter_bpd_yesterday", "FT01_Flowmeter_History[1]", "REAL", 1.0, 600),
    PLCChannel(PLC_IP_ADDRESS, "ft01_flowmeter_bpd_today", "FT01_Flowmeter_History[0]", "REAL",100.0, 600),
    PLCChannel(PLC_IP_ADDRESS, "mc11_motorfrequency", "MC11_Pump.status.speedFeedback", "REAL", 5.0, 600),
    PLCChannel(PLC_IP_ADDRESS, "mc21_motorfrequency", "MC21_Pump.status.speedFeedback", "REAL", 5.0, 600),
    PLCChannel(PLC_IP_ADDRESS, "state_supervisor", "Supervisor.State", "STRING", 1.0, 600, map_=PHASE_STATES),
    PLCChannel(PLC_IP_ADDRESS, "state_system1", "System1.State", "STRING", 1.0, 600, map_=PHASE_STATES),
    PLCChannel(PLC_IP_ADDRESS, "state_system2", "System2.State", "STRING", 1.0, 600, map_=PHASE_STATES)
]
||||
|
||||
|
||||
class start(threading.Thread, deviceBase):
    """Start class required by Meshify.

    Daemon thread that polls the configured PLC channels forever, sending
    changed values to Meshify, plus periodic watchdog and public-IP checks.
    """

    def __init__(self, name=None, number=None, mac=None, Q=None, mcu=None,
                 companyId=None, offset=None, mqtt=None, Nodes=None):
        """Initialize the driver (parameters are supplied by the Meshify host)."""
        threading.Thread.__init__(self)
        deviceBase.__init__(self, name=name, number=number, mac=mac, Q=Q,
                            mcu=mcu, companyId=companyId, offset=offset,
                            mqtt=mqtt, Nodes=Nodes)

        self.daemon = True
        self.version = "4"
        self.finished = threading.Event()
        # When True, every channel is re-sent regardless of change detection.
        self.force_send = False
        self.public_ip_address = ""
        self.public_ip_address_last_checked = 0
        self.watchdog = False
        self.watchdog_last_checked = 0
        self.watchdog_last_sent = 0
        # The thread starts itself as soon as it is constructed.
        threading.Thread.start(self)

    # this is a required function for all drivers, its goal is to upload some piece of data
    # about your device so it can be seen on the web
    def register(self):
        """Register the driver."""
        #self.sendtodbDev("log", "BOOM! Booted.", 0)
        pass

    def run(self):
        """Actually run the driver (main polling loop; never returns)."""
        # Give the network stack time to come up before talking to the PLC.
        for i in range(0, WAIT_FOR_CONNECTION_SECONDS):
            print("transferlite driver will start in {} seconds".format(WAIT_FOR_CONNECTION_SECONDS - i))
            time.sleep(1)
        log.info("BOOM! Starting transferlite driver...")

        self._check_ip_address()
        self._check_watchdog()

        # Expose this driver instance to the Meshify node registry.
        self.nodes["transferlite_0199"] = self

        send_loops = 0

        while True:
            now = time.time()
            if self.force_send:
                log.warning("FORCE SEND: TRUE")

            for chan in CHANNELS:
                read_val = chan.read()
                if chan.check(read_val, self.force_send):
                    self.sendtodbDev(1, chan.mesh_name, chan.value, 0, 'transferlite')
                # Pace the PLC reads; also the back-off used after DataErrors.
                time.sleep(TAG_DATAERROR_SLEEPTIME)

            # print("transferlite driver still alive...")
            # Keep force_send active for a few full loops before clearing it.
            if self.force_send:
                if send_loops > 2:
                    log.warning("Turning off force_send")
                    self.force_send = False
                    send_loops = 0
                else:
                    send_loops += 1

            if WATCHDOG_ENABLE:
                if (now - self.watchdog_last_checked) > WATCHDOG_CHECK_PERIOD:
                    self._check_watchdog()

            if (now - self.public_ip_address_last_checked) > IP_CHECK_PERIOD:
                self._check_ip_address()

    def _check_watchdog(self):
        """Check the watchdog and send to Meshify if changed or stale."""
        test_watchdog = self.transferlite_watchdog()
        now = time.time()
        self.watchdog_last_checked = now
        # Re-send on any state change, or periodically so the status never goes stale.
        if test_watchdog != self.watchdog or (now - self.watchdog_last_sent) > WATCHDOG_SEND_PERIOD:
            self.sendtodbDev(1, 'watchdog', test_watchdog, 0, 'transferlite')
            self.watchdog = test_watchdog
            self.watchdog_last_sent = now

    def _check_ip_address(self):
        """Check the public IP address and send to Meshify if changed."""
        self.public_ip_address_last_checked = time.time()
        test_public_ip = get_public_ip_address()
        if not test_public_ip == self.public_ip_address:
            self.sendtodbDev(1, 'public_ip_address', test_public_ip, 0, 'transferlite')
            self.public_ip_address = test_public_ip

    def transferlite_watchdog(self):
        """Write a random integer to the PLC and then 1 seconds later check that it has been decremented by 1.

        The PLC program is expected to decrement watchdog_INT; a matching
        value proves the PLC logic is alive. Returns True/False.
        """
        randval = randint(0, 32767)
        write_tag(str(PLC_IP_ADDRESS), 'watchdog_INT', randval)
        time.sleep(1)
        watchdog_val = read_tag(str(PLC_IP_ADDRESS), 'watchdog_INT')
        try:
            # read_tag returns a (value, type) pair or False on failure.
            return (randval - 1) == watchdog_val[0]
        except (KeyError, TypeError):
            return False

    def transferlite_sync(self, name, value):
        """Sync all data from the driver (Meshify command hook)."""
        self.force_send = True
        # self.sendtodb("log", "synced", 0)
        return True

    def transferlite_writeplctag(self, name, value):
        """Write a value to the PLC.

        *value* is a JSON-ish string like "{'tag': 'cmd_Start', 'val': 1}";
        single quotes are normalized before parsing.
        """
        new_val = json.loads(str(value).replace("'", '"'))
        tag_n = str(new_val['tag'])  # "cmd_Start"
        val_n = new_val['val']
        w = write_tag(str(PLC_IP_ADDRESS), tag_n, val_n)
        log.info("Result of transferlite_writeplctag(self, {}, {}) = {}".format(name, value, w))
        # write_tag returns None on failure paths; report that as an error string.
        if w is None:
            w = "Error writing to PLC..."
        return w
|
||||
69
meshifyDrivers/transferlite/utilities.py
Normal file
69
meshifyDrivers/transferlite/utilities.py
Normal file
@@ -0,0 +1,69 @@
|
||||
"""Utility functions for the driver."""
|
||||
import socket
|
||||
import struct
|
||||
import time
|
||||
|
||||
|
||||
def get_public_ip_address():
    """Find the IP Address of the host device.

    NOTE(review): despite the name, this returns the host's *local*
    (LAN-facing) address chosen for the route to 8.8.8.8 — no external
    lookup is performed; confirm whether the public address was intended.
    Returns "0.0.0.0" when the socket operation fails.
    """
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    try:
        # UDP connect() sends no traffic; it only selects the outbound route.
        sock.connect(("8.8.8.8", 80))
        ip_address = sock.getsockname()[0]
    except Exception as err:
        # Fix: an unhandled failure here previously crashed the caller's
        # polling thread and leaked the socket.
        print("could not determine IP address: {}".format(err))
        ip_address = "0.0.0.0"
    finally:
        sock.close()
    return ip_address
|
||||
|
||||
|
||||
def int_to_float16(int_to_convert):
    """Decode a 16-bit integer as an IEEE-754 half-precision float.

    Bit layout: 1 sign bit, 5 exponent bits, 10 fraction bits.
    """
    bits = ('0' * 16 + '{0:b}'.format(int_to_convert))[-16:]
    sign = -1.0 if bits[0] == '1' else 1.0
    exponent = float(int(bits[1:6], 2))
    fraction = float(int(bits[6:17], 2))

    if exponent == float(0b00000):
        # Subnormal: no implicit leading 1, fixed exponent of -14.
        return sign * 2 ** -14 * fraction / (2.0 ** 10.0)
    if exponent == float(0b11111):
        # All-ones exponent: infinity when the fraction is zero, else NaN.
        return sign * float("inf") if fraction == 0 else float("NaN")
    # Normal number: implicit leading 1 and a biased (-15) exponent.
    mantissa = 1.0 + fraction / (2.0 ** 10.0)
    return sign * (2 ** (exponent - 15)) * mantissa
|
||||
|
||||
|
||||
def ints_to_float(int1, int2):
    """Combine two 16-bit registers (high word first) into a 32-bit float."""
    packed = struct.pack('>HH', int1, int2)
    value = struct.unpack('>f', packed)[0]
    print("[{}, {}] >> {}".format(int1, int2, value))
    return value
|
||||
|
||||
|
||||
def degf_to_degc(temp_f):
    """Return *temp_f* (degrees Fahrenheit) expressed in degrees Celsius."""
    offset = temp_f - 32.0
    return offset * (5.0 / 9.0)
|
||||
|
||||
|
||||
def degc_to_degf(temp_c):
    """Return *temp_c* (degrees Celsius) expressed in degrees Fahrenheit."""
    scaled = temp_c * 1.8
    return scaled + 32.0
|
||||
|
||||
|
||||
class Every(object):
    """Run a callable at most once per configured interval."""

    def __init__(self, func, seconds, run_at_start=True):
        """Remember *func* and its interval; optionally fire it immediately."""
        self.func = func
        self.seconds = seconds
        self.last_run = time.time()
        if run_at_start:
            self.check(force=True)

    def check(self, force=False):
        """Invoke the callable when it is due (or *force*d).

        Returns the callable's result when invoked; otherwise None.
        """
        now = time.time()
        due = (now - self.last_run) > self.seconds
        if not (due or force):
            return None
        self.last_run = now
        return self.func()
|
||||
Reference in New Issue
Block a user