Added report generator for thingsboard
615
meshifyDrivers/piflow/Channel.py
Normal file
@@ -0,0 +1,615 @@
|
||||
"""Define Meshify channel class."""
|
||||
import time
|
||||
from pycomm.ab_comm.clx import Driver as ClxDriver
|
||||
from pycomm.cip.cip_base import CommError, DataError
|
||||
from file_logger import filelogger as log
|
||||
import minimalmodbus
|
||||
|
||||
minimalmodbus.BAUDRATE = 9600
|
||||
minimalmodbus.STOPBITS = 1
|
||||
|
||||
TAG_DATAERROR_SLEEPTIME = 5
|
||||
|
||||
def binarray(intval):
|
||||
"""Split an integer into its bits."""
|
||||
bin_string = '{0:08b}'.format(intval)
|
||||
bin_arr = [i for i in bin_string]
|
||||
bin_arr.reverse()
|
||||
return bin_arr
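# Example: binarray(5) returns ['1', '0', '1', '0', '0', '0', '0', '0'],
# i.e. bit 0 first, as single-character strings (only 8 bits wide).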
|
||||
|
||||
|
||||
def read_tag(addr, tag, plc_type="CLX"):
|
||||
"""Read a tag from the PLC."""
|
||||
direct = plc_type == "Micro800"
|
||||
clx = ClxDriver()
|
||||
try:
|
||||
if clx.open(addr, direct_connection=direct):
|
||||
try:
|
||||
val = clx.read_tag(tag)
|
||||
clx.close()
|
||||
return val
|
||||
except DataError as err:
|
||||
clx.close()
|
||||
time.sleep(TAG_DATAERROR_SLEEPTIME)
|
||||
log.error("Data Error during readTag({}, {}): {}".format(addr, tag, err))
|
||||
except CommError:
|
||||
# err = c.get_status()
|
||||
clx.close()
|
||||
log.error("Could not connect during readTag({}, {})".format(addr, tag))
|
||||
except AttributeError as err:
|
||||
clx.close()
|
||||
log.error("AttributeError during readTag({}, {}): \n{}".format(addr, tag, err))
|
||||
clx.close()
|
||||
return False
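# Usage sketch (address and tag are illustrative, taken from the Tags.py
# defaults; a reachable PLC is required, so this is a comment-only example):
#
#   result = read_tag('192.168.1.12', 'Val_FlowmeterFR', plc_type='Micro800')
#   if result:
#       value, data_type = result   # pycomm returns a (value, type) tuple
#   else:
#       pass                        # False on any communication or data error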
|
||||
|
||||
|
||||
def read_array(addr, tag, start, end, plc_type="CLX"):
|
||||
"""Read an array from the PLC."""
|
||||
direct = plc_type == "Micro800"
|
||||
clx = ClxDriver()
|
||||
if clx.open(addr, direct_connection=direct):
|
||||
arr_vals = []
|
||||
try:
|
||||
for i in range(start, end):
|
||||
tag_w_index = tag + "[{}]".format(i)
|
||||
val = clx.read_tag(tag_w_index)
|
||||
arr_vals.append(round(val[0], 4))
|
||||
if arr_vals:
|
||||
clx.close()
|
||||
return arr_vals
|
||||
else:
|
||||
log.error("No length for {}".format(addr))
|
||||
clx.close()
|
||||
return False
|
||||
except Exception:
|
||||
log.error("Error during readArray({}, {}, {}, {})".format(addr, tag, start, end))
|
||||
err = clx.get_status()
|
||||
clx.close()
|
||||
log.error(err)
|
||||
clx.close()
|
||||
|
||||
|
||||
def write_tag(addr, tag, val, plc_type="CLX"):
|
||||
"""Write a tag value to the PLC."""
|
||||
direct = plc_type == "Micro800"
|
||||
clx = ClxDriver()
|
||||
try:
|
||||
if clx.open(addr, direct_connection=direct):
|
||||
try:
|
||||
initial_val = clx.read_tag(tag)
|
||||
write_status = clx.write_tag(tag, val, initial_val[1])
|
||||
clx.close()
|
||||
return write_status
|
||||
except DataError as err:
|
||||
clx_err = clx.get_status()
|
||||
clx.close()
|
||||
log.error("--\nDataError during writeTag({}, {}, {}, plc_type={}) -- {}\n{}\n".format(addr, tag, val, plc_type, err, clx_err))
|
||||
|
||||
except CommError as err:
|
||||
clx_err = clx.get_status()
|
||||
log.error("--\nCommError during write_tag({}, {}, {}, plc_type={})\n{}\n--".format(addr, tag, val, plc_type, err))
|
||||
clx.close()
|
||||
return False
|
||||
|
||||
|
||||
class Channel(object):
|
||||
"""Holds the configuration for a Meshify channel."""
|
||||
|
||||
def __init__(self, mesh_name, data_type, chg_threshold, guarantee_sec, map_=False, write_enabled=False):
|
||||
"""Initialize the channel."""
|
||||
self.mesh_name = mesh_name
|
||||
self.data_type = data_type
|
||||
self.last_value = None
|
||||
self.value = None
|
||||
self.last_send_time = 0
|
||||
self.chg_threshold = chg_threshold
|
||||
self.guarantee_sec = guarantee_sec
|
||||
self.map_ = map_
|
||||
self.write_enabled = write_enabled
|
||||
|
||||
def __str__(self):
|
||||
"""Create a string for the channel."""
|
||||
return "{}\nvalue: {}, last_send_time: {}".format(self.mesh_name, self.value, self.last_send_time)
|
||||
|
||||
def check(self, new_value, force_send=False):
|
||||
"""Check to see if the new_value needs to be stored."""
|
||||
send_needed = False
|
||||
send_reason = ""
|
||||
if self.data_type in ('BOOL', 'STRING') or isinstance(new_value, str):
|
||||
if self.last_send_time == 0:
|
||||
send_needed = True
|
||||
send_reason = "no send time"
|
||||
elif self.value is None:
|
||||
send_needed = True
|
||||
send_reason = "no value"
|
||||
elif self.value != new_value:
|
||||
if self.map_:
|
||||
if self.value != self.map_[new_value]:
|
||||
send_needed = True
|
||||
send_reason = "value change"
|
||||
else:
|
||||
send_needed = True
|
||||
send_reason = "value change"
|
||||
elif (time.time() - self.last_send_time) > self.guarantee_sec:
|
||||
send_needed = True
|
||||
send_reason = "guarantee sec"
|
||||
elif force_send:
|
||||
send_needed = True
|
||||
send_reason = "forced"
|
||||
else:
|
||||
if self.last_send_time == 0:
|
||||
send_needed = True
|
||||
send_reason = "no send time"
|
||||
elif self.value is None:
|
||||
send_needed = True
|
||||
send_reason = "no value"
|
||||
elif abs(self.value - new_value) > self.chg_threshold:
|
||||
send_needed = True
|
||||
send_reason = "change threshold"
|
||||
elif (time.time() - self.last_send_time) > self.guarantee_sec:
|
||||
send_needed = True
|
||||
send_reason = "guarantee sec"
|
||||
elif force_send:
|
||||
send_needed = True
|
||||
send_reason = "forced"
|
||||
if send_needed:
|
||||
self.last_value = self.value
|
||||
if self.map_:
|
||||
try:
|
||||
self.value = self.map_[new_value]
|
||||
except KeyError:
|
||||
log.error("Cannot find a map value for {} in {} for {}".format(new_value, self.map_, self.mesh_name))
|
||||
self.value = new_value
|
||||
else:
|
||||
self.value = new_value
|
||||
self.last_send_time = time.time()
|
||||
log.info("Sending {} for {} - {}".format(self.value, self.mesh_name, send_reason))
|
||||
return send_needed
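# Usage sketch of the reporting rules above (hypothetical numbers):
#
#   ch = Channel('volume_flow', 'REAL', chg_threshold=5, guarantee_sec=3600)
#   ch.check(100.0)   # True  - first reading ("no send time")
#   ch.check(102.0)   # False - delta 2.0 is inside the change threshold
#   ch.check(110.0)   # True  - delta 10.0 exceeds the threshold of 5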
|
||||
|
||||
def read(self):
|
||||
"""Read the value."""
|
||||
pass
|
||||
|
||||
|
||||
def identity(sent):
|
||||
"""Return exactly what was sent to it."""
|
||||
return sent
|
||||
|
||||
def volume_units(vunit):
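    """Map a volume flow unit code to its display string."""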
|
||||
units = {
|
||||
0: "cm cubed/s",
|
||||
1: "cm cubed/min",
|
||||
2: "cm cubed/h",
|
||||
3: "cm cubed/d",
|
||||
4: "dm cubed/s",
|
||||
5: "dm cubed/min",
|
||||
6: "dm cubed/h",
|
||||
7: "dm cubed/d",
|
||||
8: "m cubed/s",
|
||||
9: "m cubed/min",
|
||||
10: "m cubed/h",
|
||||
11: "m cubed/d",
|
||||
12: "ml/s",
|
||||
13: "ml/min",
|
||||
14: "ml/h",
|
||||
15: "ml/d",
|
||||
16: "l/s",
|
||||
17: "l/min",
|
||||
18: "l/h (+)",
|
||||
19: "l/d",
|
||||
20: "hl/s",
|
||||
21: "hl/min",
|
||||
22: "hl/h",
|
||||
23: "hl/d",
|
||||
24: "Ml/s",
|
||||
25: "Ml/min",
|
||||
26: "Ml/h",
|
||||
27: "Ml/d",
|
||||
32: "af/s",
|
||||
33: "af/min",
|
||||
34: "af/h",
|
||||
35: "af/d",
|
||||
36: "ft cubed/s",
|
||||
37: "ft cubed/min",
|
||||
38: "ft cubed/h",
|
||||
39: "ft cubed/d",
|
||||
40: "fl oz/s (us)",
|
||||
41: "fl oz/min (us)",
|
||||
42: "fl oz/h (us)",
|
||||
43: "fl oz/d (us)",
|
||||
44: "gal/s (us)",
|
||||
45: "gal/min (us)",
|
||||
46: "gal/h (us)",
|
||||
47: "gal/d (us)",
|
||||
48: "Mgal/s (us)",
|
||||
49: "Mgal/min (us)",
|
||||
50: "Mgal/h (us)",
|
||||
51: "Mgal/d (us)",
|
||||
52: "bbl/s (us;liq.)",
|
||||
53: "bbl/min (us;liq.)",
|
||||
54: "bbl/h (us;liq.)",
|
||||
55: "bbl/d (us;liq.)",
|
||||
56: "bbl/s (us;beer)",
|
||||
57: "bbl/min (us;beer)",
|
||||
58: "bbl/h (us;beer)",
|
||||
59: "bbl/d (us;beer)",
|
||||
60: "bbl/s (us;oil)",
|
||||
61: "bbl/min (us;oil)",
|
||||
62: "bbl/h (us;oil)",
|
||||
63: "bbl/d (us;oil)",
|
||||
64: "bbl/s (us;tank)",
|
||||
65: "bbl/min (us;tank)",
|
||||
66: "bbl/h (us;tank)",
|
||||
67: "bbl/d (us;tank)",
|
||||
68: "gal/s (imp)",
|
||||
69: "gal/min (imp)",
|
||||
70: "gal/h (imp)",
|
||||
71: "gal/d (imp)",
|
||||
72: "Mgal/s (imp)",
|
||||
73: "Mgal/min (imp)",
|
||||
74: "Mgal/h (imp)",
|
||||
75: "Mgal/d (imp)",
|
||||
76: "bbl/s (imp;beer)",
|
||||
77: "bbl/min (imp;beer)",
|
||||
78: "bbl/h (imp;beer)",
|
||||
79: "bbl/d (imp;beer)",
|
||||
80: "bbl/s (imp;oil)",
|
||||
81: "bbl/min (imp;oil)",
|
||||
82: "bbl/h (imp;oil)",
|
||||
83: "bbl/d (imp;oil)",
|
||||
88: "kgal/s (us)",
|
||||
89: "kgal/min (us)",
|
||||
90: "kgal/h (us)",
|
||||
91: "kgal/d (us)",
|
||||
92: "MMft cubed/s",
|
||||
93: "MMft cubed/min",
|
||||
94: "MMft cubed/h",
|
||||
96: "Mft cubed/d"
|
||||
}
|
||||
return units[vunit]
|
||||
|
||||
def totalizer_units(tunit):
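    """Map a totalizer unit code to its display string."""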
|
||||
|
||||
units = {
|
||||
0: "cm cubed",
|
||||
1: "dm cubed",
|
||||
2: "m cubed",
|
||||
3: "ml",
|
||||
4: "l",
|
||||
5: "hl",
|
||||
6: "Ml Mega",
|
||||
8: "af",
|
||||
9: "ft cubed",
|
||||
10: "fl oz (us)",
|
||||
11: "gal (us)",
|
||||
12: "Mgal (us)",
|
||||
13: "bbl (us;liq.)",
|
||||
14: "bbl (us;beer)",
|
||||
15: "bbl (us;oil)",
|
||||
16: "bbl (us;tank)",
|
||||
17: "gal (imp)",
|
||||
18: "Mgal (imp)",
|
||||
19: "bbl (imp;beer)",
|
||||
20: "bbl (imp;oil)",
|
||||
22: "kgal (us)",
|
||||
23: "Mft cubed",
|
||||
50: "g",
|
||||
51: "kg",
|
||||
52: "t",
|
||||
53: "oz",
|
||||
54: "lb",
|
||||
55: "STon",
|
||||
100: "Nl",
|
||||
101: "Nm cubed",
|
||||
102: "Sm cubed",
|
||||
103: "Sft cubed",
|
||||
104: "Sl",
|
||||
105: "Sgal (us)",
|
||||
106: "Sbbl (us;liq.)",
|
||||
107: "Sgal (imp)",
|
||||
108: "Sbbl (us;oil)",
|
||||
109: "MMSft cubed",
|
||||
110: "Nhl",
|
||||
251: "None"
|
||||
}
|
||||
return units[tunit]
|
||||
|
||||
def int_to_bits(n,x):
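    """Convert integer n into a list of x bits, most-significant bit first."""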
|
||||
return pad_to_x([int(digit) for digit in bin(n)[2:]],x) # [2:] to chop off the "0b" part
|
||||
|
||||
def pad_to_x(n,x):
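    """Left-pad the bit list n with zeros until it is x elements long."""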
|
||||
while len(n) < x:
|
||||
n = [0] + n
|
||||
return n
|
||||
|
||||
def status_codes(n):
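    """Decode a 16-bit drive status word into a readable status string."""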
|
||||
|
||||
status_array = int_to_bits(n,16)
|
||||
status_low = {
|
||||
0: "Stopped;",
|
||||
1: "Operating in Forward;",
|
||||
2: "Operating in Reverse;",
|
||||
3: "DC operating;"
|
||||
}
|
||||
status_mid = {
|
||||
0: "",
|
||||
1: "Speed searching;",
|
||||
2: "Accelerating;",
|
||||
3: "At constant speed;",
|
||||
4: "Decelerating;",
|
||||
5: "Decelerating to stop;",
|
||||
6: "H/W OCS;",
|
||||
7: "S/W OCS;",
|
||||
8: "Dwell operating;"
|
||||
}
|
||||
status_high = {
|
||||
0: "Normal state",
|
||||
4: "Warning occurred",
|
||||
8: "Fault occurred"
|
||||
}
|
||||
values = {
|
||||
0: 8,
|
||||
1: 4,
|
||||
2: 2,
|
||||
3: 1
|
||||
}
|
||||
|
||||
stats_low = status_array[12:]
|
||||
stats_mid = status_array[8:12]
|
||||
stats_high = status_array[:4]
|
||||
low = 0
|
||||
mid = 0
|
||||
high = 0
|
||||
for x in range(4):
|
||||
if stats_low[x] == 1:
|
||||
low = low + values[x]
|
||||
if stats_mid[x] == 1:
|
||||
mid = mid + values[x]
|
||||
if stats_high[x] == 1:
|
||||
high = high + values[x]
|
||||
|
||||
return status_low[low] + " " + status_mid[mid] + ' ' + status_high[high]
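# Worked example: status_codes(49) decodes 0b0000000000110001 as
# low nibble 1, mid nibble 3, high nibble 0, giving
# "Operating in Forward; At constant speed; Normal state".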
|
||||
|
||||
def fault_code_a(n):
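    """Decode drive fault register A bits into a space-separated list of fault names."""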
|
||||
|
||||
fault_code_array = int_to_bits(n,16)
|
||||
|
||||
""" fault = {
|
||||
0: "OCT",
|
||||
1: "OVT",
|
||||
2: "EXT-A",
|
||||
3: "EST",
|
||||
4: "COL",
|
||||
5: "GFT",
|
||||
6: "OHT",
|
||||
7: "ETH",
|
||||
8: "OLT",
|
||||
9: "Reserved",
|
||||
10: "EXT-B",
|
||||
11: "EEP",
|
||||
12: "FAN",
|
||||
13: "POT",
|
||||
14: "IOLT",
|
||||
15: "LVT"
|
||||
} """
|
||||
fault = {
|
||||
0: "Overload Trip",
|
||||
1: "Underload Trip",
|
||||
2: "Inverter Overload Trip",
|
||||
3: "E-Thermal Trip",
|
||||
4: "Ground Fault Trip",
|
||||
5: "Output Image Trip",
|
||||
6: "Input Imaging Trip",
|
||||
7: "Reserved",
|
||||
8: "Reserved",
|
||||
9: "NTC Trip",
|
||||
10: "Overcurrent Trip",
|
||||
11: "Overvoltage Trip",
|
||||
12: "External Trip",
|
||||
13: "Arm Short",
|
||||
14: "Over Heat Trip",
|
||||
15: "Fuse Open Trip"
|
||||
}
|
||||
|
||||
faults = []
|
||||
counter = 15
|
||||
for x in range(16):
|
||||
if fault_code_array[x] == 1:
|
||||
faults = [fault[counter]] + faults
|
||||
counter = counter - 1
|
||||
return ' '.join(faults)
|
||||
|
||||
def fault_code_b(n):
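    """Decode drive fault register B bits into a space-separated list of fault names."""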
|
||||
|
||||
fault_code_array = int_to_bits(n,8)
|
||||
|
||||
""" fault = {
|
||||
0: "COM",
|
||||
1: "Reserved",
|
||||
2: "NTC",
|
||||
3: "REEP",
|
||||
4: "OC2",
|
||||
5: "NBR",
|
||||
6: "SAFA",
|
||||
7: "SAFB"
|
||||
} """
|
||||
fault = {
|
||||
0: "Reserved",
|
||||
1: "Reserved",
|
||||
2: "Reserved",
|
||||
3: "FAN Trip",
|
||||
4: "Reserved",
|
||||
5: "Reserved",
|
||||
6: "Pre PID Fail",
|
||||
7: "Bad contact at basic I/O board",
|
||||
8: "External Brake Trip",
|
||||
9: "No Motor Trip",
|
||||
10: "Bad Option Card",
|
||||
11: "Reserved",
|
||||
12: "Reserved",
|
||||
13: "Reserved",
|
||||
14: "Pre Over Heat Trip",
|
||||
15: "Reserved"
|
||||
}
|
||||
|
||||
faults = []
|
||||
counter = 7
|
||||
for x in range(8):
|
||||
if fault_code_array[x] == 1:
|
||||
faults = [fault[counter]] + faults
|
||||
counter = counter - 1
|
||||
return ' '.join(faults)
|
||||
|
||||
class ModbusChannel(Channel):
|
||||
"""Modbus channel object."""
|
||||
|
||||
def __init__(self, mesh_name, register_number, data_type, chg_threshold, guarantee_sec, channel_size=1, map_=False, write_enabled=False, transform_fn=identity, unit_number=1, scaling=0):
|
||||
"""Initialize the channel."""
|
||||
super(ModbusChannel, self).__init__(mesh_name, data_type, chg_threshold, guarantee_sec, map_, write_enabled)
|
||||
self.mesh_name = mesh_name
|
||||
self.register_number = register_number
|
||||
self.channel_size = channel_size
|
||||
self.data_type = data_type
|
||||
self.last_value = None
|
||||
self.value = None
|
||||
self.last_send_time = 0
|
||||
self.chg_threshold = chg_threshold
|
||||
self.guarantee_sec = guarantee_sec
|
||||
self.map_ = map_
|
||||
self.write_enabled = write_enabled
|
||||
self.transform_fn = transform_fn
|
||||
self.unit_number = unit_number
|
||||
self.instrument = minimalmodbus.Instrument('/dev/ttyS0', self.unit_number)
|
||||
self.scaling = scaling
|
||||
|
||||
def read(self):
|
||||
"""Return the transformed read value."""
|
||||
if self.data_type == "FLOAT":
|
||||
try:
|
||||
read_value = self.instrument.read_float(self.register_number, 4, self.channel_size)
|
||||
except IOError as e:
|
||||
log.info(e)
|
||||
return None
|
||||
|
||||
elif self.data_type == "INTEGER" or self.data_type == "STRING":
|
||||
try:
|
||||
read_value = self.instrument.read_register(self.register_number, self.scaling, 4)
|
||||
except IOError as e:
|
||||
log.info(e)
|
||||
return None
|
||||
read_value = self.transform_fn(read_value)
|
||||
return read_value
|
||||
|
||||
def write(self, value):
|
||||
"""Write a value to a register"""
|
||||
if self.data_type == "FLOAT":
|
||||
value = float(value)
|
||||
elif self.data_type == "INTEGER":
|
||||
value = int(value)
|
||||
else:
|
||||
value = str(value)
|
||||
try:
|
||||
self.instrument.write_register(self.register_number, value, self.scaling, 16 if self.channel_size > 1 else 6)
|
||||
return True
|
||||
except Exception as e:
|
||||
log.info("Failed to write value: {}".format(e))
|
||||
return False
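# Usage sketch (register and unit number taken from the Tags.py defaults;
# reading requires real RS-485 hardware on /dev/ttyS0, so comment-only):
#
#   flow = ModbusChannel('volume_flow', 3873, 'FLOAT', 10, 3600,
#                        channel_size=2, unit_number=247)
#   val = flow.read()                 # None on IOError, otherwise a float
#   if val is not None and flow.check(val):
#       publish(flow.mesh_name, flow.value)   # publish() is hypothetical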
|
||||
|
||||
|
||||
class PLCChannel(Channel):
|
||||
"""PLC Channel Object."""
|
||||
|
||||
def __init__(self, ip, mesh_name, plc_tag, data_type, chg_threshold, guarantee_sec, map_=False, write_enabled=False, plc_type='CLX'):
|
||||
"""Initialize the channel."""
|
||||
super(PLCChannel, self).__init__(mesh_name, data_type, chg_threshold, guarantee_sec, map_, write_enabled)
|
||||
self.plc_ip = ip
|
||||
self.mesh_name = mesh_name
|
||||
self.plc_tag = plc_tag
|
||||
self.data_type = data_type
|
||||
self.last_value = None
|
||||
self.value = None
|
||||
self.last_send_time = 0
|
||||
self.chg_threshold = chg_threshold
|
||||
self.guarantee_sec = guarantee_sec
|
||||
self.map_ = map_
|
||||
self.write_enabled = write_enabled
|
||||
self.plc_type = plc_type
|
||||
|
||||
def read(self):
|
||||
"""Read the value."""
|
||||
plc_value = None
|
||||
if self.plc_tag and self.plc_ip:
|
||||
read_value = read_tag(self.plc_ip, self.plc_tag, plc_type=self.plc_type)
|
||||
if read_value:
|
||||
plc_value = read_value[0]
|
||||
|
||||
return plc_value
|
||||
|
||||
|
||||
class BoolArrayChannels(Channel):
|
||||
"""Hold the configuration for a set of boolean array channels."""
|
||||
|
||||
def __init__(self, ip, mesh_name, plc_tag, data_type, chg_threshold, guarantee_sec, map_=False, write_enabled=False):
|
||||
"""Initialize the channel."""
|
||||
super(BoolArrayChannels, self).__init__(mesh_name, data_type, chg_threshold, guarantee_sec, map_, write_enabled)
|
||||
self.plc_ip = ip
|
||||
self.mesh_name = mesh_name
|
||||
self.plc_tag = plc_tag
|
||||
self.data_type = data_type
|
||||
self.last_value = None
|
||||
self.value = None
|
||||
self.last_send_time = 0
|
||||
self.chg_threshold = chg_threshold
|
||||
self.guarantee_sec = guarantee_sec
|
||||
self.map_ = map_
|
||||
self.write_enabled = write_enabled
|
||||
|
||||
def compare_values(self, new_val_dict):
|
||||
"""Compare new values to old values to see if the values need storing."""
|
||||
send = False
|
||||
for idx in new_val_dict:
|
||||
try:
|
||||
if new_val_dict[idx] != self.last_value[idx]:
|
||||
send = True
|
||||
except KeyError:
|
||||
log.error("Key Error in self.compare_values for index {}".format(idx))
|
||||
send = True
|
||||
return send
|
||||
|
||||
def read(self, force_send=False):
|
||||
"""Read the value and check to see if needs to be stored."""
|
||||
send_needed = False
|
||||
send_reason = ""
|
||||
if self.plc_tag:
|
||||
val = read_tag(self.plc_ip, self.plc_tag)
|
||||
if val:
|
||||
bool_arr = binarray(val[0])
|
||||
new_val = {}
|
||||
for idx in self.map_:
|
||||
try:
|
||||
new_val[self.map_[idx]] = bool_arr[idx]
|
||||
except KeyError:
|
||||
log.error("Not able to get value for index {}".format(idx))
|
||||
|
||||
if self.last_send_time == 0:
|
||||
send_needed = True
|
||||
send_reason = "no send time"
|
||||
elif self.value is None:
|
||||
send_needed = True
|
||||
send_reason = "no value"
|
||||
elif self.compare_values(new_val):
|
||||
send_needed = True
|
||||
send_reason = "value change"
|
||||
elif (time.time() - self.last_send_time) > self.guarantee_sec:
|
||||
send_needed = True
|
||||
send_reason = "guarantee sec"
|
||||
elif force_send:
|
||||
send_needed = True
|
||||
send_reason = "forced"
|
||||
|
||||
if send_needed:
|
||||
self.value = new_val
|
||||
self.last_value = self.value
|
||||
self.last_send_time = time.time()
|
||||
log.info("Sending {} for {} - {}".format(self.value, self.mesh_name, send_reason))
|
||||
return send_needed
|
||||
563
meshifyDrivers/piflow/PiFlow.py
Normal file
@@ -0,0 +1,563 @@
|
||||
"""Driver for PiFlow"""
|
||||
import os
|
||||
import threading
|
||||
import json
|
||||
import time
|
||||
from random import randint
|
||||
from datetime import datetime as dt
|
||||
from device_base import deviceBase
|
||||
import persistence
|
||||
from utilities import get_public_ip_address, get_private_ip_address
|
||||
from file_logger import filelogger as log
|
||||
"""import RPi.GPIO as GPIO
|
||||
|
||||
Relay_Ch1 = 26
|
||||
Relay_Ch2 = 20
|
||||
Relay_Ch3 = 21
|
||||
|
||||
GPIO.setwarnings(False)
|
||||
GPIO.setmode(GPIO.BCM)
|
||||
|
||||
GPIO.setup(Relay_Ch1,GPIO.OUT)
|
||||
GPIO.output(Relay_Ch1, GPIO.HIGH)
|
||||
GPIO.setup(Relay_Ch2,GPIO.OUT)
|
||||
GPIO.output(Relay_Ch2, GPIO.HIGH)
|
||||
GPIO.setup(Relay_Ch3,GPIO.OUT)
|
||||
GPIO.output(Relay_Ch3, GPIO.HIGH)
|
||||
"""
|
||||
_ = None
|
||||
os.system('sudo timedatectl set-timezone America/Chicago')
|
||||
log.info("PiFlow startup")
|
||||
|
||||
# GLOBAL VARIABLES
|
||||
WAIT_FOR_CONNECTION_SECONDS = 5
|
||||
IP_CHECK_PERIOD = 60
|
||||
|
||||
|
||||
# PERSISTENCE FILE
|
||||
PERSIST = persistence.load('persist.json')
|
||||
if not PERSIST:
|
||||
PERSIST = {
|
||||
'flowmeter': 247,
|
||||
'drive': 1,
|
||||
'isVFD': False,
|
||||
'drive_enabled': True,
|
||||
'state': False,
|
||||
'state_timer': 0,
|
||||
'plc_ip': '192.168.1.12',
|
||||
'yesterday_totalizer_1': dt.today().day,
|
||||
'yesterday_totalizer_2': dt.today().day,
|
||||
'yesterday_totalizer_3': dt.today().day,
|
||||
'yesterday_total_totalizer_1': 0,
|
||||
'yesterday_total_midnight_totalizer_1': 0,
|
||||
'yesterday_total_totalizer_2': 0,
|
||||
'yesterday_total_midnight_totalizer_2': 0,
|
||||
'yesterday_total_totalizer_3': 0,
|
||||
'yesterday_total_midnight_totalizer_3': 0
|
||||
}
|
||||
persistence.store(PERSIST, 'persist.json')
|
||||
"""
|
||||
try:
|
||||
if time.time() - PERSIST['state_timer'] >= 60:
|
||||
GPIO.output(Relay_Ch1, GPIO.HIGH)
|
||||
PERSIST['state'] = False
|
||||
persistence.store(PERSIST, "persist.json")
|
||||
elif PERSIST['state']:
|
||||
GPIO.output(Relay_Ch1, GPIO.LOW)
|
||||
else:
|
||||
GPIO.output(Relay_Ch1, GPIO.HIGH)
|
||||
except:
|
||||
PERSIST['state'] = False
|
||||
PERSIST['state_time'] = time.time()
|
||||
persistence.store(PERSIST, "persist.json")
|
||||
"""
|
||||
drive_enabled = PERSIST['drive_enabled']
|
||||
try:
|
||||
isVFD = PERSIST['isVFD']
|
||||
except KeyError:
|
||||
PERSIST['isVFD'] = False
|
||||
isVFD = PERSIST['isVFD']
|
||||
persistence.store(PERSIST)
|
||||
|
||||
try:
|
||||
plc_ip = PERSIST['plc_ip']
|
||||
except KeyError:
|
||||
PERSIST['plc_ip'] = '192.168.1.12'
|
||||
plc_ip = PERSIST['plc_ip']
|
||||
persistence.store(PERSIST)
|
||||
|
||||
from Tags import tags
|
||||
|
||||
CHANNELS = tags
|
||||
from runtimeStats import RuntimeStats as RTS
|
||||
|
||||
class start(threading.Thread, deviceBase):
|
||||
"""Start class required by Meshify."""
|
||||
|
||||
def __init__(self, name=None, number=None, mac=None, Q=None, mcu=None,
|
||||
companyId=None, offset=None, mqtt=None, Nodes=None):
|
||||
"""Initialize the driver."""
|
||||
threading.Thread.__init__(self)
|
||||
deviceBase.__init__(self, name=name, number=number, mac=mac, Q=Q,
|
||||
mcu=mcu, companyId=companyId, offset=offset,
|
||||
mqtt=mqtt, Nodes=Nodes)
|
||||
|
||||
self.daemon = True
|
||||
self.version = "28"
|
||||
self.finished = threading.Event()
|
||||
self.force_send = False
|
||||
self.public_ip_address = ""
|
||||
self.private_ip_address = ""
|
||||
self.public_ip_address_last_checked = 0
|
||||
self.status = ""
|
||||
self.alarm = ""
|
||||
self.rts = RTS()
|
||||
self.rts.loadDataFromFile()
|
||||
self.rts.saveDataToFile()
|
||||
|
||||
threading.Thread.start(self)
|
||||
|
||||
# This is a required function for all drivers; its goal is to upload some piece of data
|
||||
# about your device so it can be seen on the web
|
||||
def register(self):
|
||||
"""Register the driver."""
|
||||
# self.sendtodb("log", "BOOM! Booted.", 0)
|
||||
pass
|
||||
|
||||
def run(self):
|
||||
"""Actually run the driver."""
|
||||
for i in range(0, WAIT_FOR_CONNECTION_SECONDS):
|
||||
print("PiFlow driver will start in {} seconds".format(WAIT_FOR_CONNECTION_SECONDS - i))
|
||||
time.sleep(1)
|
||||
log.info("BOOM! Starting PiFlow driver...")
|
||||
|
||||
#self._check_watchdog()
|
||||
self._check_ip_address()
|
||||
|
||||
self.nodes["PiFlow_0199"] = self
|
||||
|
||||
send_loops = 0
|
||||
|
||||
while True:
|
||||
now = time.time()
|
||||
if self.force_send:
|
||||
log.warning("FORCE SEND: TRUE")
|
||||
if isVFD:
|
||||
status = {}
|
||||
for chan in CHANNELS[:24]: #build status/alarm strings
|
||||
try:
|
||||
val = chan.read()
|
||||
chan.check(val, self.force_send)
|
||||
status[chan.mesh_name] = chan.value
|
||||
except Exception as e:
|
||||
log.warning("An error occurred in status check: {}".format(e))
|
||||
try:
|
||||
self.sendStatus(status)
|
||||
except Exception as e:
|
||||
log.warning("An error occurred in send status: {}".format(e))
|
||||
for chan in CHANNELS[24:]:
|
||||
try:
|
||||
val = chan.read()
|
||||
if chan.mesh_name in ['totalizer_1','totalizer_2','totalizer_3']:
|
||||
right_now = dt.today()
|
||||
today_total, yesterday_total = self.totalize(val, PERSIST['yesterday_'+chan.mesh_name], right_now.day, right_now.hour, right_now.minute, PERSIST['yesterday_total_midnight_'+chan.mesh_name], PERSIST['yesterday_total_'+chan.mesh_name], chan.mesh_name)
|
||||
if chan.check(val, self.force_send):
|
||||
self.sendtodbDev(1, chan.mesh_name, chan.value, 0, 'PiFlow')
|
||||
self.sendtodbDev(1,"today_"+chan.mesh_name, today_total,0,'PiFlow')
|
||||
self.sendtodbDev(1,"yesterday_"+chan.mesh_name, yesterday_total,0,'PiFlow')
|
||||
self.sendtodbDev(1, chan.mesh_name + "_units", "BBL",0,'PiFlow')
|
||||
elif chan.mesh_name == "frequency":
|
||||
if val > 0:
|
||||
self.rts.addHertzDataPoint(val)
|
||||
self.rts.saveDataToFile()
|
||||
if chan.check(val, self.force_send):
|
||||
self.sendtodbDev(1, chan.mesh_name, chan.value, 0, 'PiFlow')
|
||||
self.sendtodbDev(1, "avgFrequency30Days", self.rts.calculateAverageHertzMultiDay(),0,'PiFlow')
|
||||
else:
|
||||
if chan.check(val, self.force_send):
|
||||
self.sendtodbDev(1, chan.mesh_name, chan.value, 0, 'PiFlow')
|
||||
except Exception as e:
|
||||
log.warning("An error occurred in data collection: {}".format(e))
|
||||
else:
|
||||
for chan in CHANNELS:
|
||||
try:
|
||||
if chan.mesh_name == "remote_start":
|
||||
val = PERSIST["state"]
|
||||
else:
|
||||
val = None
|
||||
for _ in range(3):
|
||||
temp = chan.read()
|
||||
if not temp == None:
|
||||
val = temp
|
||||
if val == None:
|
||||
log.info("No Modbus data; sending previous value")
|
||||
val = chan.value
|
||||
if chan.mesh_name in ['totalizer_1','totalizer_2','totalizer_3']:
|
||||
right_now = dt.today()
|
||||
today_total, yesterday_total = self.totalize(val, PERSIST['yesterday_'+chan.mesh_name], right_now.day, right_now.hour, right_now.minute, PERSIST['yesterday_total_midnight_'+chan.mesh_name], PERSIST['yesterday_total_'+chan.mesh_name], chan.mesh_name)
|
||||
if chan.check(val, self.force_send):
|
||||
self.sendtodbDev(1, chan.mesh_name, chan.value, 0, 'PiFlow')
|
||||
self.sendtodbDev(1,"today_"+chan.mesh_name, today_total,0,'PiFlow')
|
||||
self.sendtodbDev(1,"yesterday_"+chan.mesh_name, yesterday_total,0,'PiFlow')
|
||||
elif chan.mesh_name == "volume_flow" and not PERSIST['drive_enabled']:
|
||||
if chan.check(val, self.force_send):
|
||||
self.sendtodbDev(1, chan.mesh_name, chan.value, 0, 'PiFlow')
|
||||
if chan.value > 0:
|
||||
self.sendtodbDev(1, "run_status", "Running", 0, 'PiFlow')
|
||||
if not self.rts.runs[self.rts.todayString]["run_" + str(self.rts.currentRun)]["start"]:
|
||||
self.rts.startRun()
|
||||
self.rts.saveDataToFile()
|
||||
else:
|
||||
self.sendtodbDev(1,"run_status", "Stopped", 0, 'PiFlow')
|
||||
if self.rts.runs[self.rts.todayString]["run_" + str(self.rts.currentRun)]["start"] and not self.rts.runs[self.rts.todayString]["run_" + str(self.rts.currentRun)]["end"]:
|
||||
self.rts.endRun()
|
||||
self.rts.saveDataToFile()
|
||||
|
||||
self.sendtodbDev(1, "percentRunTime30Days", self.rts.calculateRunPercentMultiDay(), 0,'PiFlow')
|
||||
elif chan.mesh_name == "run_status":
|
||||
if "Operating" in val and not self.rts.runs[self.rts.todayString]["run_" + str(self.rts.currentRun)]["start"]:
|
||||
self.rts.startRun()
|
||||
self.rts.saveDataToFile()
|
||||
elif "Stopped" in val and self.rts.runs[self.rts.todayString]["run_" + str(self.rts.currentRun)]["start"] and not self.rts.runs[self.rts.todayString]["run_" + str(self.rts.currentRun)]["end"]:
|
||||
self.rts.endRun()
|
||||
self.rts.saveDataToFile()
|
||||
if chan.check(val, self.force_send):
|
||||
self.sendtodbDev(1, chan.mesh_name, chan.value, 0, 'PiFlow')
|
||||
self.sendtodbDev(1, "percentRunTime30Days", self.rts.calculateRunPercentMultiDay(), 0,'PiFlow')
|
||||
elif chan.mesh_name == "frequency":
|
||||
if val > 0:
|
||||
self.rts.addHertzDataPoint(val)
|
||||
self.rts.saveDataToFile()
|
||||
if chan.check(val, self.force_send):
|
||||
self.sendtodbDev(1, chan.mesh_name, chan.value, 0, 'PiFlow')
|
||||
self.sendtodbDev(1, "avgFrequency30Days", self.rts.calculateAverageHertzMultiDay(),0,'PiFlow')
|
||||
elif chan.mesh_name == "remote_start":
|
||||
if chan.check(val, self.force_send):
|
||||
self.sendtodbDev(1, chan.mesh_name, chan.value, 0, 'PiFlow')
|
||||
PERSIST["state_timer"] = time.time()
|
||||
persistence.store(PERSIST, "persist.json")
|
||||
else:
|
||||
if chan.check(val, self.force_send):
|
||||
self.sendtodbDev(1, chan.mesh_name, chan.value, 0, 'PiFlow')
|
||||
|
||||
except Exception as e:
|
||||
log.warning("An error occurred: {}".format(e))
|
||||
time.sleep(3)
|
||||
|
||||
|
||||
# print("PiFlow driver still alive...")
|
||||
if self.force_send:
|
||||
if send_loops > 2:
|
||||
log.warning("Turning off force_send")
|
||||
self.force_send = False
|
||||
send_loops = 0
|
||||
else:
|
||||
send_loops += 1
|
||||
|
||||
|
||||
if (now - self.public_ip_address_last_checked) > IP_CHECK_PERIOD:
|
||||
self._check_ip_address()
|
||||
time.sleep(10)
|
||||
|
||||
def _check_ip_address(self):
|
||||
"""Check the public IP address and send to Meshify if changed."""
|
||||
self.public_ip_address_last_checked = time.time()
|
||||
test_public_ip = get_public_ip_address()
|
||||
test_public_ip = test_public_ip[:-1]
|
||||
test_private_ip = get_private_ip_address()
|
||||
if not test_public_ip == self.public_ip_address and not test_public_ip == "0.0.0.0":
|
||||
self.sendtodbDev(1, 'public_ip_address', test_public_ip, 0, 'PiFlow')
|
||||
self.public_ip_address = test_public_ip
|
||||
if not test_private_ip == self.private_ip_address:
|
||||
self.sendtodbDev(1, 'private_ip_address', test_private_ip, 0, 'PiFlow')
|
||||
self.private_ip_address = test_private_ip
|
||||
|
||||
def PiFlow_sync(self, name, value):
|
||||
"""Sync all data from the driver."""
|
||||
self.force_send = True
|
||||
# self.sendtodb("log", "synced", 0)
|
||||
return True
|
||||
|
||||
def PiFlow_flowmeternumber(self, name, unit_number):
|
||||
"""Change the unit number for the PiFlow flow meter"""
|
||||
unit_number = int(unit_number)
|
||||
if drive_enabled:
|
||||
for chan in CHANNELS[0:8]:
|
||||
chan.unit_number = unit_number
|
||||
PERSIST['flowmeter'] = unit_number
|
||||
persistence.store(PERSIST, 'persist.json')
|
||||
return True
|
||||
else:
|
||||
for chan in CHANNELS:
|
||||
chan.unit_number = unit_number
|
||||
PERSIST['flowmeter'] = unit_number
|
||||
persistence.store(PERSIST, 'persist.json')
|
||||
self.sendtodbDev(1, 'flowmeternumber', unit_number, 0,'PiFlow')
|
||||
return True
|
||||
return False
|
||||
|
||||
def PiFlow_drivenumber(self, name, unit_number):
|
||||
"""Change the unit number for the PiFlow drive"""
|
||||
unit_number = int(unit_number)
|
||||
for chan in CHANNELS[8:]:
|
||||
chan.unit_number = unit_number
|
||||
|
||||
PERSIST['drive'] = unit_number
|
||||
persistence.store(PERSIST, 'persist.json')
|
||||
self.sendtodbDev(1, 'drivenumber', unit_number, 0,'PiFlow')
|
||||
return True
|
||||
|
||||
def PiFlow_reboot(self, name, value):
|
||||
os.system('reboot')
|
||||
return True
|
||||
|
||||
def PiFlow_drive_enabled(self, name, value):
|
||||
value = int(value)
|
||||
if value == 1:
|
||||
PERSIST['drive_enabled'] = True
|
||||
else:
|
||||
PERSIST['drive_enabled'] = False
|
||||
|
||||
persistence.store(PERSIST, 'persist.json')
|
||||
self.sendtodbDev(1, 'drive_enabled', value, 0,'PiFlow')
|
||||
return True
|
||||
|
||||
def PiFlow_write(self, name, value):
|
||||
"""Write a value to the device via modbus"""
|
||||
new_val = json.loads(str(value).replace("'", '"'))
|
||||
addr_n = int(new_val['addr'])
|
||||
reg_n = int(new_val['reg'])
|
||||
val_n = new_val['val']
|
||||
for chan in CHANNELS:
|
||||
if chan.unit_number == addr_n and chan.register_number == reg_n:
|
||||
write_res = chan.write(val_n)
|
||||
|
||||
log.info("Result of PiFlow_write(self, {}, {}) = {}".format(name, value, write_res))
|
||||
return write_res
|
||||
"""
|
||||
def PiFlow_start(self, name, value):
|
||||
if isVFD:
|
||||
#do something with the plc
|
||||
log.info("Sending START signal to PLC")
|
||||
else:
|
||||
log.info("Sending START signal to Drive via relay {}".format(Relay_Ch1))
|
||||
GPIO.output(Relay_Ch1,GPIO.LOW)
|
||||
PERSIST["state"] = True
|
||||
PERSIST["state_timer"] = time.time()
|
||||
persistence.store(PERSIST,"persist.json")
|
||||
|
||||
return True
|
||||
|
||||
def PiFlow_stop(self, name, value):
|
||||
if isVFD:
|
||||
log.info("Sending STOP signal to PLC")
|
||||
#do something with the plc
|
||||
else:
|
||||
log.info("Sending STOP signal to Drive")
|
||||
GPIO.output(Relay_Ch1,GPIO.HIGH)
|
||||
PERSIST["state"] = False
|
||||
PERSIST["state_timer"] = time.time()
|
||||
persistence.store(PERSIST, "persist.json")
|
||||
return True
|
||||
"""
|
||||
def totalize(self, val, yesterday, day, hour, minute, yesterday_total_midnight, yesterday_total, channel):
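    """Compute running daily totals from a cumulative totalizer reading."""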
|
||||
if (yesterday_total == 0 and yesterday_total_midnight == 0) or (yesterday_total is None or yesterday_total_midnight is None):
|
||||
yesterday_total_midnight = val
|
||||
PERSIST['yesterday_total_midnight_'+channel] = yesterday_total_midnight
|
||||
persistence.store(PERSIST, 'persist.json')
|
||||
today_total = val - yesterday_total_midnight
|
||||
if hour == 0 and minute == 0 and day != yesterday:
|
||||
self.rts.manageTime()
|
||||
yesterday_total = today_total
|
||||
yesterday_total_midnight = val
|
||||
today_total = val - yesterday_total_midnight
|
||||
yesterday = day
|
||||
PERSIST['yesterday_'+channel] = yesterday
|
||||
PERSIST['yesterday_total_'+channel] = yesterday_total
|
||||
PERSIST['yesterday_total_midnight_'+channel] = yesterday_total_midnight
|
||||
persistence.store(PERSIST,'persist.json')
|
||||
|
||||
return today_total,yesterday_total
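# Worked example (illustrative numbers): with a midnight snapshot of 1200 and a
# current totalizer reading of 1250, today_total = 1250 - 1200 = 50. On the
# first poll at 00:00 of a new day, yesterday_total becomes that 50, the
# midnight snapshot resets to the current reading, and today_total restarts at 0.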
|
||||
|
||||
def sendStatus(self, status):
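    """Build status and alarm strings from the VFD channel values and publish changes."""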
|
||||
status_string = ""
|
||||
|
||||
fault_codes = {
|
||||
0: "",
|
||||
2: "Auxiliary Input",
|
||||
3: "Power Loss",
|
||||
4: "UnderVoltage",
|
||||
5: "OverVoltage",
|
||||
7: "Motor Overload",
|
||||
8: "Heatsink OvrTemp",
|
||||
9: "Thermister OvrTemp",
|
||||
10: "DynBrake OverTemp",
|
||||
12: "HW OverCurrent",
|
||||
13: "Ground Fault",
|
||||
14: "Ground Warning",
|
||||
15: "Load Loss",
|
||||
17: "Input Phase Loss",
|
||||
18: "Motor PTC Trip",
|
||||
19: "Task Overrun",
|
||||
20: "TorqPrv Spd Band",
|
||||
21: "Output PhaseLoss",
|
||||
24: "Decel Inhibit",
|
||||
25: "OverSpeed Limit",
|
||||
26: "Brake Slipped",
|
||||
27: "Torq Prove Cflct",
|
||||
28: "TP Encls Config",
|
||||
29: "Analog In Loss",
|
||||
33: "AuRsts Exhausted",
|
||||
35: "IPM OverCurrent",
|
||||
36: "SW OverCurrent",
|
||||
38: "Phase U to Grnd",
|
||||
39: "Phase V to Grnd",
|
||||
40: "Phase W to Grnd",
|
||||
41: "Phase UV Short",
|
||||
42: "Phase VW Short",
|
||||
43: "Phase WU Short",
|
||||
44: "Phase UNegToGrnd",
|
||||
45: "Phase VNegToGrnd",
|
||||
46: "Phase WNegToGrnd",
|
||||
48: "System Defaulted",
|
||||
49: "Drive Powerup",
|
||||
51: "Clr Fault Queue",
|
||||
55: "Ctrl Bd Overtemp",
|
||||
59: "Invalid Code",
|
||||
61: "Shear Pin 1",
|
||||
62: "Shear Pin 2",
|
||||
64: "Drive Overload",
|
||||
67: "Pump Off",
|
||||
71: "Port 1 Adapter",
|
||||
72: "Port 2 Adapter",
|
||||
73: "Port 3 Adapter",
|
||||
74: "Port 4 Adapter",
|
||||
75: "Port 5 Adapter",
|
||||
76: "Port 6 Adapter",
|
||||
77: "IR Volts Range",
|
||||
78: "FluxAmpsRef Rang",
|
||||
79: "Excessive Load",
|
||||
80: "AutoTune Aborted",
|
||||
81: "Port 1 DPI Loss",
|
||||
82: "Port 2 DPI Loss",
|
||||
83: "Port 3 DPI Loss",
|
||||
84: "Port 4 DPI Loss",
|
||||
85: "Port 5 DPI Loss",
|
||||
86: "Port 6 DPI Loss",
|
||||
87: "Ixo VoltageRange",
|
||||
91: "Pri VelFdbk Loss",
|
||||
93: "Hw Enable Check",
|
||||
94: "Alt VelFdbk Loss",
|
||||
95: "Aux VelFdbk Loss",
|
||||
96: "PositionFdbkLoss",
|
||||
97: "Auto Tach Switch",
|
||||
100: "Parameter Chksum",
|
||||
101: "PwrDn NVS Blank",
|
||||
102: "NVS Not Blank",
|
||||
103: "PwrDn Nvs Incomp",
|
||||
104: "Pwr Brd Checksum",
|
||||
106: "Incompat MCB-PB",
|
||||
107: "Replaced MCB-PB",
|
||||
108: "Anlg Cal Chksum",
|
||||
110: "Ivld Pwr Bd Data",
|
||||
111: "PwrBd Invalid ID",
|
||||
112: "PwrBd App MinVer",
|
||||
113: "Tracking DataErr",
|
||||
115: "PwrDn Table Full",
|
||||
116: "PwrDnEntry2Large",
|
||||
117: "PwrDn Data Chksm",
|
||||
118: "PwrBd PwrDn Chks",
|
||||
124: "App ID Changed",
|
||||
125: "Using Backup App",
|
||||
134: "Start on PowerUp",
|
||||
137: "Ext Prechrg Err",
|
||||
138: "Precharge Open",
|
||||
141: "Autn Enc Angle",
|
||||
142: "Autn Spd Rstrct",
|
||||
143: "AutoTune CurReg",
|
||||
144: "AutoTune Inertia",
|
||||
145: "AutoTune Travel",
|
||||
13037: "Net IO Timeout"
|
||||
}
|
||||
|
||||
if status['vfd_active'] == "Stopped":
|
||||
status_string = status_string + status['vfd_active'] + "; " + status['vfd_ready']
|
||||
else:
|
||||
status_string = status_string + status['vfd_active']
|
||||
if status['vfd_rev']:
|
||||
status_string = status_string + '; ' + status['vfd_rev']
|
||||
if status['vfd_fwd']:
|
||||
status_string = status_string + '; ' + status['vfd_fwd']
|
||||
if status['vfd_atreference']:
|
||||
status_string = status_string + '; ' + status['vfd_atreference']
|
||||
alarm_string = ""
|
||||
if status['vfd_faulted'] == "Drive Faulted":
|
||||
status_string = status_string + '; ' + status['vfd_faulted']
|
||||
if status['vfd_commloss']:
|
||||
alarm_string = alarm_string + '; ' + status['vfd_commloss']
|
||||
if status['vfd_fbkalarm']:
|
||||
alarm_string = alarm_string + '; ' + status['vfd_fbkalarm']
|
||||
if status['vfd_faultcode']:
|
||||
alarm_string = alarm_string + '; ' + "Fault: {} Fault code: {}".format(fault_codes[status['vfd_faultcode']],str(status['vfd_faultcode']))
|
||||
if status['minspeedalarm']:
|
||||
alarm_string = alarm_string + '; ' + status['minspeedalarm']
|
||||
if status['pumpedoff']:
|
||||
alarm_string = alarm_string + '; ' + status['pumpedoff']
|
||||
if status['lockedout']:
|
||||
alarm_string = alarm_string + '; ' + status['lockedout']
|
||||
if status['tubingpressurehi']:
|
||||
alarm_string = alarm_string + '; ' + status['tubingpressurehi']
|
||||
if status['tubingpressurehihi']:
|
||||
alarm_string = alarm_string + '; ' + status['tubingpressurehihi']
|
||||
if status['tubingpressurelo']:
|
||||
alarm_string = alarm_string + '; ' + status['tubingpressurelo']
|
||||
if status['tubingpressurelolo']:
|
||||
alarm_string = alarm_string + '; ' + status['tubingpressurelolo']
|
||||
if status['flowmeterhihi']:
|
||||
alarm_string = alarm_string + '; ' + status['flowmeterhihi']
|
||||
if status['flowmeterhi']:
|
||||
alarm_string = alarm_string + '; ' + status['flowmeterhi']
|
||||
if status['flowmeterlolo']:
|
||||
alarm_string = alarm_string + '; ' + status['flowmeterlolo']
|
||||
if status['flowmeterlo']:
|
||||
alarm_string = alarm_string + '; ' + status['flowmeterlo']
|
||||
if status['fluidlevellolo']:
|
||||
alarm_string = alarm_string + '; ' + status['fluidlevellolo']
|
||||
if status['fluidlevello']:
|
||||
alarm_string = alarm_string + '; ' + status['fluidlevello']
|
||||
if status['fluidlevelhi']:
|
||||
alarm_string = alarm_string + '; ' + status['fluidlevelhi']
|
||||
if status['fluidlevelhihi']:
|
||||
alarm_string = alarm_string + '; ' + status['fluidlevelhihi']
|
||||
try:
|
||||
# Trim a stray leading/trailing "; " separator left over from the concatenation above.
if status_string and status_string[:2] == '; ':
    status_string = status_string[2:]
if status_string and status_string[-2:] == '; ':
    status_string = status_string[:-2]
if alarm_string and alarm_string[:2] == '; ':
    alarm_string = alarm_string[2:]
if alarm_string and alarm_string[-2:] == '; ':
    alarm_string = alarm_string[:-2]
|
||||
except Exception as e:
|
||||
log.warning("Error in send status semicolon: {}".format(e))
|
||||
|
||||
|
||||
if self.status != status_string:
|
||||
self.status = status_string
|
||||
log.info("Sending {} for {}".format(status_string, 'run_status'))
|
||||
self.sendtodbDev(1, 'run_status', status_string, 0, 'PiFlow')
|
||||
if "Operating" in status_string and not self.rts.runs[self.rts.todayString]["run_" + str(self.rts.currentRun)]["start"]:
|
||||
self.rts.startRun()
|
||||
self.rts.saveDataToFile()
|
||||
elif "Stopped" in status_string and self.rts.runs[self.rts.todayString]["run_" + str(self.rts.currentRun)]["start"] and not self.rts.runs[self.rts.todayString]["run_" + str(self.rts.currentRun)]["end"]:
|
||||
self.rts.endRun()
|
||||
self.rts.saveDataToFile()
|
||||
self.sendtodbDev(1, "percentRunTime30Days", self.rts.calculateRunPercentMultiDay(), 0,'PiFlow')
|
||||
if self.alarm != alarm_string:
|
||||
self.alarm = alarm_string
|
||||
log.info("Sending {} for {}".format(alarm_string, 'fault_a'))
|
||||
self.sendtodbDev(1, 'fault_a', alarm_string, 0 , 'PiFlow')
|
||||
|
||||
|
||||
|
||||
|
||||
92
meshifyDrivers/piflow/Tags.py
Normal file
@@ -0,0 +1,92 @@
|
||||
from Channel import PLCChannel,Channel, ModbusChannel, status_codes, fault_code_a, fault_code_b, volume_units, totalizer_units
|
||||
import persistence
|
||||
|
||||
PERSIST = persistence.load('persist.json')
|
||||
flowmeter_unit_number = PERSIST['flowmeter']
|
||||
drive_enabled = PERSIST['drive_enabled']
|
||||
isVFD = PERSIST['isVFD']
|
||||
if drive_enabled:
|
||||
drive_unit_number = PERSIST['drive']
|
||||
try:
|
||||
plc_ip = PERSIST['plc_ip']
|
||||
except KeyError:
|
||||
PERSIST['plc_ip'] = '192.168.1.12'
|
||||
persistence.store(PERSIST)
|
||||
if isVFD:
|
||||
tags = [
|
||||
PLCChannel(plc_ip,'vfd_atreference','sts_VFD_AtReference','BOOL',0,3600,map_={0: "", 1: "At speed"},plc_type='Micro800'),
|
||||
PLCChannel(plc_ip,'vfd_rev','sts_VFD_REV','BOOL',0,3600,map_={0: "", 1: "Operating in Reverse"},plc_type='Micro800'),
|
||||
PLCChannel(plc_ip,'vfd_fwd','sts_VFD_FWD','BOOL',0,3600,map_={0: "", 1: "Operating in Forward"},plc_type='Micro800'),
|
||||
PLCChannel(plc_ip,'vfd_active','sts_VFD_Active','BOOL',0,3600,map_={0: "Stopped", 1: "Running"},plc_type='Micro800'),
|
||||
PLCChannel(plc_ip,'vfd_ready','sts_VFD_Ready','BOOL',0,3600,map_={0: "Drive Not Ready", 1: "Drive Ready"},plc_type='Micro800'),
|
||||
PLCChannel(plc_ip,'vfd_faultcode','sts_VFD_FaultCode','REAL',0,3600, plc_type='Micro800'),
|
||||
PLCChannel(plc_ip,'vfd_faulted','AL0_VFD','BOOL',0,3600,map_={0: "", 1: "Drive Faulted"},plc_type='Micro800'),
|
||||
PLCChannel(plc_ip,'vfd_commloss','AL0_VFDComLoss','BOOL',0,3600,map_={0: "", 1: "Drive Comms Loss"},plc_type='Micro800'),
|
||||
PLCChannel(plc_ip,'vfd_fbkalarm','AL0_VFD_FBAlarm','BOOL',0,3600,map_={0: "", 1: "Drive Lost Feedback"},plc_type='Micro800'),
|
||||
PLCChannel(plc_ip,'tubingpressurehi','AL0_TubingPressureHi','BOOL',0,3600,map_={0: "", 1: "High Tubing Pressure"},plc_type='Micro800'),
|
||||
PLCChannel(plc_ip,'tubingpressurehihi','AL0_TubingPressureHiHi','BOOL',0,3600,map_={0: "", 1: "High High Tubing Pressure"},plc_type='Micro800'),
|
||||
PLCChannel(plc_ip,'tubingpressurelo','AL0_TubingPressureLo','BOOL',0,3600,map_={0: "", 1: "Low Tubing Pressure"},plc_type='Micro800'),
|
||||
PLCChannel(plc_ip,'tubingpressurelolo','AL0_TubingPressureLoLo','BOOL',0,3600,map_={0: "", 1: "Low Low Tubing Pressure"},plc_type='Micro800'),
|
||||
PLCChannel(plc_ip,'flowmeterhihi','AL0_FlowMeterHiHi','BOOL',0,3600,map_={0: "", 1: "High High FM Flow Rate"},plc_type='Micro800'),
|
||||
PLCChannel(plc_ip,'flowmeterhi','AL0_FlowMeterHi','BOOL',0,3600,map_={0: "", 1: "High FM Flow Rate"},plc_type='Micro800'),
|
||||
PLCChannel(plc_ip,'flowmeterlolo','AL0_FlowMeterLoLo','BOOL',0,3600,map_={0: "", 1: "Low Low FM Flow Rate"},plc_type='Micro800'),
|
||||
PLCChannel(plc_ip,'flowmeterlo','AL0_FlowMeterLo','BOOL',0,3600,map_={0: "", 1: "Low FM Flow Rate"},plc_type='Micro800'),
|
||||
PLCChannel(plc_ip,'minspeedalarm','AL0_MinSpeedAlarm','BOOL',0,3600,map_={0: "", 1: "Drive not able to maintain min speed"},plc_type='Micro800'),
|
||||
PLCChannel(plc_ip,'pumpedoff','AL0_PumpedOff','BOOL',0,3600,map_={0: "", 1: "Pumped Off"},plc_type='Micro800'),
|
||||
PLCChannel(plc_ip,'fluidlevellolo','AL0_FluidLevelLoLo','BOOL',0,3600,map_={0: "", 1: "Low Low Fluid Level"},plc_type='Micro800'),
|
||||
PLCChannel(plc_ip,'fluidlevello','AL0_FluidLevelLo','BOOL',0,3600,map_={0: "", 1: "Low Fluid Level"},plc_type='Micro800'),
|
||||
PLCChannel(plc_ip,'fluidlevelhi','AL0_FluidLevelHi','BOOL',0,3600,map_={0: "", 1: "High Fluid Level"},plc_type='Micro800'),
|
||||
PLCChannel(plc_ip,'fluidlevelhihi','AL0_FluidLevelHiHi','BOOL',0,3600,map_={0: "", 1: "High High Fluid Level"},plc_type='Micro800'),
|
||||
PLCChannel(plc_ip,'lockedout','AlarmLockOut','BOOL',0,3600,map_={0: "", 1: "Locked Out Repeated Alarms"},plc_type='Micro800'),
|
||||
PLCChannel(plc_ip,'volume_flow','Val_FlowmeterFR','REAL',5,3600,plc_type='Micro800'),
|
||||
PLCChannel(plc_ip,'current','val_VFD_OutputCurrent','REAL',5,3600,plc_type='Micro800'),
|
||||
PLCChannel(plc_ip,'frequency','val_VFD_ActualSpeed','REAL',5,3600,plc_type='Micro800'),
|
||||
PLCChannel(plc_ip,'pid_feedback','val_FluidLevel','REAL',5,3600,plc_type='Micro800'),
|
||||
PLCChannel(plc_ip,'totalizer_1','Val_FlowMeterT1','REAL',5,3600,plc_type='Micro800'),
|
||||
PLCChannel(plc_ip,'totalizer_2','Val_FlowMeterT2','REAL',5,3600,plc_type='Micro800'),
|
||||
PLCChannel(plc_ip,'totalizer_3','Val_FlowMeterT3','REAL',5,3600,plc_type='Micro800'),
|
||||
PLCChannel(plc_ip,'volume_flow_units','CMD_FlowMeterUnit','BOOL',1,3600,map_={0: "GPM", 1: "BPD"},plc_type='Micro800')
|
||||
]
|
||||
else:
|
||||
if drive_enabled:
|
||||
tags = [
|
||||
ModbusChannel('volume_flow', 3873, 'FLOAT', 10, 3600,channel_size=2, unit_number=flowmeter_unit_number),
|
||||
ModbusChannel('totalizer_1', 2609, 'FLOAT', 100, 3600,channel_size=2, unit_number=flowmeter_unit_number),
|
||||
ModbusChannel('totalizer_2', 2809, 'FLOAT', 100, 3600,channel_size=2, unit_number=flowmeter_unit_number),
|
||||
ModbusChannel('totalizer_3', 3009, 'FLOAT', 100, 3600,channel_size=2, unit_number=flowmeter_unit_number),
|
||||
ModbusChannel('volume_flow_units', 2102, 'INTEGER', 1,86400,channel_size=1, unit_number=flowmeter_unit_number, transform_fn=volume_units),
|
||||
ModbusChannel('totalizer_1_units', 4603, 'INTEGER', 1,86400,channel_size=1, unit_number=flowmeter_unit_number, transform_fn=totalizer_units),
|
||||
ModbusChannel('totalizer_2_units', 4604, 'INTEGER', 1,86400,channel_size=1, unit_number=flowmeter_unit_number, transform_fn=totalizer_units),
|
||||
ModbusChannel('totalizer_3_units', 4605, 'INTEGER', 1,86400,channel_size=1, unit_number=flowmeter_unit_number, transform_fn=totalizer_units),
|
||||
ModbusChannel('remote_start', 0000, 'INTEGER', 1, 86400, channel_size=1, unit_number=flowmeter_unit_number),
|
||||
ModbusChannel('run_status', 772, 'STRING', 0, 3600, channel_size=1, unit_number=drive_unit_number, transform_fn=status_codes),
|
||||
ModbusChannel('frequency', 784, 'INTEGER', 2, 3600, channel_size=2, unit_number=drive_unit_number,scaling=2 ),
|
||||
ModbusChannel('current', 783, 'INTEGER', 2, 3600, channel_size=2, unit_number=drive_unit_number,scaling=1 ),
|
||||
ModbusChannel('fault_a', 815, 'STRING', 1, 3600, channel_size=1, unit_number=drive_unit_number,transform_fn=fault_code_a),
|
||||
ModbusChannel('fault_b', 816, 'STRING', 1, 3600, channel_size=1, unit_number=drive_unit_number,transform_fn=fault_code_b),
|
||||
ModbusChannel('pid_ref', 791, 'INTEGER', 5, 3600, channel_size=1, unit_number=drive_unit_number,scaling=1),
|
||||
ModbusChannel('pid_feedback', 792, 'INTEGER', 5, 3600, channel_size=1, unit_number=drive_unit_number,scaling=1),
|
||||
ModbusChannel('motor_rated_current', 4896, 'INTEGER', 300, 86400, channel_size=1, unit_number=drive_unit_number,scaling=1),
|
||||
ModbusChannel('sleep_delay', 4924, 'INTEGER', 5, 86400, channel_size=1, unit_number=drive_unit_number, scaling=1)
|
||||
]
|
||||
else:
|
||||
tags = [
|
||||
ModbusChannel('volume_flow', 3873, 'FLOAT', 10, 3600,channel_size=2, unit_number=flowmeter_unit_number),
|
||||
ModbusChannel('totalizer_1', 2609, 'FLOAT', 100, 3600,channel_size=2, unit_number=flowmeter_unit_number),
|
||||
ModbusChannel('totalizer_2', 2809, 'FLOAT', 100, 3600,channel_size=2, unit_number=flowmeter_unit_number),
|
||||
ModbusChannel('totalizer_3', 3009, 'FLOAT', 100, 3600,channel_size=2, unit_number=flowmeter_unit_number),
|
||||
ModbusChannel('volume_flow_units', 2102, 'INTEGER', 1, 86400,channel_size=1, unit_number=flowmeter_unit_number, transform_fn=volume_units),
|
||||
ModbusChannel('totalizer_1_units', 4603, 'INTEGER', 1, 86400,channel_size=1, unit_number=flowmeter_unit_number, transform_fn=totalizer_units),
|
||||
ModbusChannel('totalizer_2_units', 4604, 'INTEGER', 1, 86400,channel_size=1, unit_number=flowmeter_unit_number, transform_fn=totalizer_units),
|
||||
ModbusChannel('totalizer_3_units', 4605, 'INTEGER', 1, 86400,channel_size=1, unit_number=flowmeter_unit_number, transform_fn=totalizer_units),
|
||||
ModbusChannel('remote_start', 0000, 'BOOL', 1, 86400, channel_size=1, unit_number=flowmeter_unit_number)
|
||||
]
BIN
meshifyDrivers/piflow/__pycache__/runtimeStats.cpython-39.pyc
Normal file
Binary file not shown.
17
meshifyDrivers/piflow/config.txt
Normal file
@@ -0,0 +1,17 @@
|
||||
{
|
||||
|
||||
"driverFileName":"PiFlow.py",
|
||||
"deviceName":"piflow",
|
||||
"driverId":"0280",
|
||||
"releaseVersion":"28",
|
||||
"files": {
|
||||
"file1":"PiFlow.py",
|
||||
"file2":"Channel.py",
|
||||
"file3":"file_logger.py",
|
||||
"file4":"Tags.py",
|
||||
"file5":"utilities.py",
|
||||
"file6":"persistence.py",
|
||||
"file7":"runtimeStats.py"
|
||||
}
|
||||
|
||||
}
|
||||
205
meshifyDrivers/piflow/device_base.py
Normal file
@@ -0,0 +1,205 @@
|
||||
import types
|
||||
import traceback
|
||||
import binascii
|
||||
import threading
|
||||
import time
|
||||
import thread
|
||||
import os
|
||||
import struct
|
||||
import sys
|
||||
import textwrap
|
||||
|
||||
class deviceBase():
|
||||
|
||||
def __init__(self, name=None, number=None, mac=None, Q=None, mcu=None, companyId=None, offset=None, mqtt=None, Nodes=None):
|
||||
self.offset = offset
|
||||
self.company = companyId
|
||||
self.name = name
|
||||
self.number = number
|
||||
self.q = Q
|
||||
self.deviceName = name + '_[' + mac + ':' + number[0:2] + ':' + number[2:] + ']!'
|
||||
self.chName = "M1" + '_[' + mac + ':'
|
||||
self.chName2 = '_[' + mac + ':'
|
||||
print 'device name is:'
|
||||
print self.deviceName
|
||||
mac2 = mac.replace(":", "")
|
||||
self.mac = mac2.upper()
|
||||
self.address = 1
|
||||
self.debug = True
|
||||
self.mcu = mcu
|
||||
self.firstRun = True
|
||||
self.mqtt = mqtt
|
||||
self.nodes = Nodes
|
||||
#local dictionary of derived nodes ex: localNodes[tank_0199] = self
|
||||
self.localNodes = {}
|
||||
os.system("chmod 777 /root/reboot")
|
||||
os.system("echo nameserver 8.8.8.8 > /etc/resolv.conf")
|
||||
|
||||
|
||||
def sendtodbLoc(self, ch, channel, value, timestamp, deviceName, mac):
|
||||
|
||||
|
||||
# This will add your derived nodes to the master nodes list, allowing them to receive sets.
|
||||
localNodesName = deviceName + "_" + str(ch) + "99"
|
||||
|
||||
if not self.localNodes.has_key(localNodesName):
|
||||
self.localNodes[localNodesName] = True
|
||||
self.nodes[localNodesName] = self
|
||||
|
||||
#make the techname
|
||||
lst = textwrap.wrap(str(mac), width=2)
|
||||
tech = ""
|
||||
for i in range(len(lst)):
|
||||
tech += lst[i].lower() + ":"
|
||||
|
||||
|
||||
chName2 = '_[' + tech
|
||||
|
||||
if int(ch) < 10:
|
||||
ch = "0" + str(int(ch))
|
||||
|
||||
if len(ch) > 2:
|
||||
ch = ch[:-2]
|
||||
|
||||
dname = deviceName + chName2 + str(ch) + ":98]!"
|
||||
|
||||
if int(timestamp) == 0:
|
||||
timestamp = self.getTime()
|
||||
|
||||
topic = 'meshify/db/%s/%s/%s/%s' % (self.company, mac, dname, channel)
|
||||
print topic
|
||||
msg = """[ { "value":"%s", "timestamp":"%s" } ]""" % (str(value), str(timestamp))
|
||||
print msg
|
||||
self.q.put([topic, msg, 0])
|
||||
|
||||
def sendtodbDevJSON(self, ch, channel, value, timestamp, deviceName):
|
||||
|
||||
if int(ch) < 10:
|
||||
ch = "0" + str(int(ch))
|
||||
dname = deviceName + self.chName2 + str(ch) + ":99]!"
|
||||
if int(timestamp) == 0:
|
||||
timestamp = self.getTime()
|
||||
|
||||
topic = 'meshify/db/%s/%s/%s/%s' % (self.company, self.mac, dname, channel)
|
||||
print topic
|
||||
msg = """[ { "value":%s, "timestamp":"%s" } ]""" % (str(value), str(timestamp))
|
||||
print msg
|
||||
self.q.put([topic, msg, 0])
|
||||
|
||||
def sendtodbLora(self, ch, channel, value, timestamp, deviceName):
|
||||
|
||||
if ":" not in ch:
|
||||
ch = ch[0:2] + ":" + ch[2:4]
|
||||
|
||||
# This will add your derived nodes to the master nodes list, allowing them to receive sets.
|
||||
localNodesName = deviceName + "_" + str(ch).replace(':', "")
|
||||
|
||||
if not self.localNodes.has_key(localNodesName):
|
||||
self.localNodes[localNodesName] = True
|
||||
self.nodes[localNodesName] = self
|
||||
|
||||
|
||||
|
||||
dname = deviceName + self.chName2 + str(ch) + "]!"
|
||||
|
||||
|
||||
|
||||
if int(timestamp) == 0:
|
||||
timestamp = self.getTime()
|
||||
|
||||
topic = 'meshify/db/%s/%s/%s/%s' % (self.company, self.mac, dname, channel)
|
||||
print topic
|
||||
msg = """[ { "value":"%s", "timestamp":"%s" } ]""" % (str(value), str(timestamp))
|
||||
print msg
|
||||
self.q.put([topic, msg, 0])
|
||||
|
||||
def sendtodbDev(self, ch, channel, value, timestamp, deviceName):
|
||||
|
||||
|
||||
# This will add your derived nodes to the master nodes list, allowing them to receive sets.
|
||||
localNodesName = deviceName + "_" + str(ch) + "99"
|
||||
|
||||
if not self.localNodes.has_key(localNodesName):
|
||||
self.localNodes[localNodesName] = True
|
||||
self.nodes[localNodesName] = self
|
||||
|
||||
if int(ch) < 10:
|
||||
ch = "0" + str(int(ch))
|
||||
|
||||
dname = deviceName + self.chName2 + str(ch) + ":99]!"
|
||||
|
||||
|
||||
|
||||
if int(timestamp) == 0:
|
||||
timestamp = self.getTime()
|
||||
|
||||
topic = 'meshify/db/%s/%s/%s/%s' % (self.company, self.mac, dname, channel)
|
||||
print topic
|
||||
msg = """[ { "value":"%s", "timestamp":"%s" } ]""" % (str(value), str(timestamp))
|
||||
print msg
|
||||
self.q.put([topic, msg, 0])
|
||||
|
||||
def sendToTB(self, payload):
|
||||
topic = 'v1/devices/me/telemetry'
|
||||
print(topic, payload)
|
||||
self.q.put([topic, payload, 0])
|
||||
|
||||
def sendToTBAttributes(self, payload):
|
||||
topic = 'v1/devices/me/attributes'
|
||||
print(topic, payload)
|
||||
self.q.put([topic, payload, 0])
|
||||
|
||||
def sendtodbCH(self, ch, channel, value, timestamp):
|
||||
|
||||
|
||||
if int(ch) < 10:
|
||||
ch = "0" + str(ch)
|
||||
|
||||
dname = self.chName + str(ch) + ":99]!"
|
||||
|
||||
|
||||
|
||||
if int(timestamp) == 0:
|
||||
timestamp = self.getTime()
|
||||
|
||||
topic = 'meshify/db/%s/%s/%s/%s' % (self.company, self.mac, dname, channel)
|
||||
print topic
|
||||
msg = """[ { "value":"%s", "timestamp":"%s" } ]""" % (str(value), str(timestamp))
|
||||
print msg
|
||||
self.q.put([topic, msg, 0])
|
||||
|
||||
def sendtodb(self, channel, value, timestamp):
|
||||
|
||||
if int(timestamp) == 0:
|
||||
timestamp = self.getTime()
|
||||
if timestamp < 1400499858:
|
||||
return
|
||||
else:
|
||||
timestamp = str(int(timestamp) + int(self.offset))
|
||||
|
||||
topic = 'meshify/db/%s/%s/%s/%s' % (self.company, self.mac, self.deviceName, channel)
|
||||
print topic
|
||||
msg = """[ { "value":"%s", "timestamp":"%s" } ]""" % (str(value), str(timestamp))
|
||||
print msg
|
||||
self.q.put([topic, msg, 0])
|
||||
|
||||
def sendtodbJSON(self, channel, value, timestamp):
|
||||
|
||||
if int(timestamp) == 0:
|
||||
timestamp = self.getTime()
|
||||
if timestamp < 1400499858:
|
||||
return
|
||||
else:
|
||||
timestamp = str(int(timestamp) + int(self.offset))
|
||||
|
||||
topic = 'meshify/db/%s/%s/%s/%s' % (self.company, self.mac, self.deviceName, channel)
|
||||
print topic
|
||||
msg = """[ { "value":%s, "timestamp":"%s" } ]""" % (str(value), str(timestamp))
|
||||
print msg
|
||||
self.q.put([topic, msg, 0])
|
||||
def getTime(self):
|
||||
return str(int(time.time() + int(self.offset)))
|
||||
|
||||
|
||||
|
||||
|
||||
18
meshifyDrivers/piflow/file_logger.py
Normal file
@@ -0,0 +1,18 @@
|
||||
"""Logging setup for PiFlow"""
|
||||
import logging
|
||||
from logging.handlers import RotatingFileHandler
|
||||
import sys
|
||||
|
||||
log_formatter = logging.Formatter('%(asctime)s %(levelname)s %(funcName)s(%(lineno)d) %(message)s')
|
||||
log_file = './PiFlow.log'
|
||||
my_handler = RotatingFileHandler(log_file, mode='a', maxBytes=500*1024,
|
||||
backupCount=2, encoding=None, delay=0)
|
||||
my_handler.setFormatter(log_formatter)
|
||||
my_handler.setLevel(logging.INFO)
|
||||
filelogger = logging.getLogger('PiFlow')
|
||||
filelogger.setLevel(logging.INFO)
|
||||
filelogger.addHandler(my_handler)
|
||||
|
||||
console_out = logging.StreamHandler(sys.stdout)
|
||||
console_out.setFormatter(log_formatter)
|
||||
filelogger.addHandler(console_out)
|
||||
20
meshifyDrivers/piflow/modbusTester.py
Normal file
20
meshifyDrivers/piflow/modbusTester.py
Normal file
@@ -0,0 +1,20 @@
|
||||
import minimalmodbus
|
||||
|
||||
minimalmodbus.BAUDRATE = 9600
|
||||
minimalmodbus.STOPBITS = 1
|
||||
address = 123
|
||||
|
||||
instrument = minimalmodbus.Instrument('/dev/ttyS0', address) #device, modbus slave address
|
||||
instrument.debug = True
|
||||
for _ in range(3):
|
||||
try:
|
||||
value = instrument.read_float(3873) #register -1 for float
|
||||
print("Flow Rate from Flow Meter: {}".format(value))
|
||||
except Exception as e:
|
||||
print("Error: {}".format(e))
|
||||
|
||||
try:
|
||||
value = instrument.read_float(784) #register -1 for float
|
||||
print("Frequency from Drive: {}".format(value))
|
||||
except Exception as e:
|
||||
print("Error: {}".format(e))
|
||||
21
meshifyDrivers/piflow/persistence.py
Normal file
21
meshifyDrivers/piflow/persistence.py
Normal file
@@ -0,0 +1,21 @@
|
||||
"""Data persistance functions."""
|
||||
# if more advanced persistence is needed, use a sqlite database
|
||||
import json
|
||||
|
||||
|
||||
def load(filename="persist.json"):
|
||||
"""Load persisted settings from the specified file."""
|
||||
try:
|
||||
with open(filename, 'r') as persist_file:
|
||||
return json.load(persist_file)
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
|
||||
def store(persist_obj, filename="persist.json"):
|
||||
"""Store the persisting settings into the specified file."""
|
||||
try:
|
||||
with open(filename, 'w') as persist_file:
|
||||
return json.dump(persist_obj, persist_file, indent=4)
|
||||
except Exception:
|
||||
return False
|
||||
172
meshifyDrivers/piflow/runtimeStats.py
Normal file
172
meshifyDrivers/piflow/runtimeStats.py
Normal file
@@ -0,0 +1,172 @@
|
||||
from datetime import datetime as dt
|
||||
import time
|
||||
import json
|
||||
import math
|
||||
|
||||
class RuntimeStats:
|
||||
|
||||
def __init__(self):
|
||||
self.runs = {}
|
||||
self.currentRun = 0
|
||||
self.today = ""
|
||||
self.todayString = ""
|
||||
|
||||
def manageTime(self):
|
||||
if self.todayString != dt.strftime(dt.today(), "%Y-%m-%d"):
|
||||
if self.runs[self.todayString]["run_" + str(self.currentRun)]["start"] and not self.runs[self.todayString]["run_" + str(self.currentRun)]["end"]:
|
||||
self.runs[self.todayString]["run_" + str(self.currentRun)]["end"] = time.mktime(dt.strptime(self.todayString + " 23:59:59", "%Y-%m-%d %H:%M:%S").timetuple())
|
||||
self.addDay()
|
||||
self.today = dt.today()
|
||||
self.todayString = dt.strftime(self.today, "%Y-%m-%d")
|
||||
days = list(self.runs.keys())
|
||||
days.sort()
|
||||
while (dt.strptime(days[-1],"%Y-%m-%d") - dt.strptime(days[0], "%Y-%m-%d")).days > 30:
|
||||
self.removeDay(day=days[0])
|
||||
days = list(self.runs.keys())
|
||||
days.sort()
|
||||
|
||||
def addHertzDataPoint(self, frequency):
|
||||
if frequency > 0:
|
||||
self.manageTime()
|
||||
try:
|
||||
self.runs[self.todayString]["run_" + str(self.currentRun)]["frequencies"].append(frequency)
|
||||
except:
|
||||
self.runs[self.todayString]["run_" + str(self.currentRun)]["frequencies"] = [frequency]
|
||||
|
||||
def startRun(self):
|
||||
if self.checkRunning():
|
||||
self.endRun()
|
||||
self.runs[self.todayString]["run_" + str(self.currentRun)]["start"] = time.time()
|
||||
|
||||
def endRun(self):
|
||||
self.runs[self.todayString]["run_" + str(self.currentRun)]["end"] = time.time()
|
||||
self.currentRun += 1
|
||||
self.runs[self.todayString]["run_" + str(self.currentRun)] = {"start":0, "end": 0, "frequencies":[]}
|
||||
|
||||
def checkRunning(self):
|
||||
if self.runs[self.todayString]["run_" + str(self.currentRun)]["start"] and not self.runs[self.todayString]["run_" + str(self.currentRun)]["end"]:
|
||||
return True
|
||||
return False
|
||||
|
||||
def addDay(self):
|
||||
self.today = dt.today()
|
||||
self.todayString = dt.strftime(self.today, "%Y-%m-%d")
|
||||
self.currentRun = 1
|
||||
self.runs[self.todayString] = {}
|
||||
self.runs[self.todayString]["run_" + str(self.currentRun)] = {"start":0, "end": 0, "frequencies":[]}
|
||||
|
||||
def countRunsDay(self, day=None):
|
||||
if not day:
|
||||
day = self.todayString
|
||||
return len(self.runs[day].keys())
|
||||
|
||||
def countRunsMultiDay(self, numDays=30):
|
||||
total_runs = 0
|
||||
for day in list(self.runs.keys()):
|
||||
total_runs += self.countRunsDay(day=day)
|
||||
return total_runs
|
||||
|
||||
def calculateAverageHertzDay(self, day=None, returnArray=False):
|
||||
dayFrequencies = []
|
||||
if not day:
|
||||
day = self.todayString
|
||||
for run in list(self.runs[day].keys()):
|
||||
try:
|
||||
dayFrequencies += self.runs[day][run]["frequencies"]
|
||||
except Exception as e:
|
||||
print("{} missing frequency data for {}".format(day,run))
|
||||
if returnArray:
|
||||
return dayFrequencies
|
||||
return round(math.fsum(dayFrequencies)/len(dayFrequencies),2)
|
||||
|
||||
def calculateAverageHertzMultiDay(self, numDays=30):
|
||||
self.manageTime()
|
||||
frequencies = []
|
||||
for day in list(self.runs.keys()):
|
||||
if not day == self.todayString and (dt.strptime(self.todayString, "%Y-%m-%d") - dt.strptime(day, "%Y-%m-%d")).days <= numDays:
|
||||
try:
|
||||
frequencies += self.calculateAverageHertzDay(day=day, returnArray=True)
|
||||
except Exception as e:
|
||||
print("{} missing frequency data".format(day))
|
||||
if len(frequencies):
|
||||
return round(math.fsum(frequencies)/len(frequencies), 2)
|
||||
return 0
|
||||
|
||||
def calculateRunTimeDay(self, day=None, convertToHours=True):
|
||||
total_time = 0
|
||||
if not day:
|
||||
day = self.todayString
|
||||
for run in list(self.runs[day].keys()):
|
||||
total_time = self.runs[day][run]["end"] - self.runs[day][run]["start"] + total_time
|
||||
if convertToHours:
|
||||
return self.convertSecondstoHours(total_time)
|
||||
return total_time
|
||||
|
||||
def calculateRunTimeMultiDay(self, numDays=30, convertToHours=True):
|
||||
total_time = 0
|
||||
for day in list(self.runs.keys()):
|
||||
if not day == self.todayString and (dt.strptime(self.todayString, "%Y-%m-%d") - dt.strptime(day, "%Y-%m-%d")).days <= numDays:
|
||||
total_time += self.calculateRunTimeDay(day=day, convertToHours=False)
|
||||
if convertToHours:
|
||||
return self.convertSecondstoHours(total_time)
|
||||
return total_time
|
||||
|
||||
def calculateRunPercentDay(self, day=None, precise=False):
|
||||
if not day:
|
||||
day = self.todayString
|
||||
if precise:
|
||||
return (self.calculateRunTimeDay(day=day)/24) * 100
|
||||
return round((self.calculateRunTimeDay(day=day)/24) * 100, 2)
|
||||
|
||||
|
||||
def calculateRunPercentMultiDay(self, numDays=30, precise=False):
|
||||
self.manageTime()
|
||||
if precise:
|
||||
return (self.calculateRunTimeMultiDay()/(24*numDays)) * 100
|
||||
return round((self.calculateRunTimeMultiDay()/(24*numDays)) * 100,2)
|
||||
|
||||
def removeDay(self, day=None):
|
||||
if not day:
|
||||
raise Exception("Day can not be None")
|
||||
print("removing day {}".format(day))
|
||||
del self.runs[day]
|
||||
|
||||
def convertSecondstoHours(self, seconds):
|
||||
return round(seconds / (60*60),2)
|
||||
|
||||
def loadDataFromFile(self, filePath="./runtimestats.json"):
|
||||
try:
|
||||
with open(filePath, "r") as f:
|
||||
temp = json.load(f)
|
||||
self.runs = temp["data"]
|
||||
self.currentRun = temp["current_run"]
|
||||
self.today = dt.strptime(temp["current_day"], "%Y-%m-%d")
|
||||
self.todayString = temp["current_day"]
|
||||
self.manageTime()
|
||||
except:
|
||||
print("Could not find file at {}".format(filePath))
|
||||
print("creating file")
|
||||
self.addDay()
|
||||
try:
|
||||
with open(filePath, "w") as f:
|
||||
d = {
|
||||
"current_run": self.currentRun,
|
||||
"current_day": self.todayString,
|
||||
"data": self.runs
|
||||
}
|
||||
json.dump(d, f, indent=4)
|
||||
except Exception as e:
|
||||
print(e)
|
||||
|
||||
def saveDataToFile(self, filePath="./runtimestats.json"):
|
||||
try:
|
||||
print("Saving Runs")
|
||||
with open(filePath, "w") as f:
|
||||
d = {
|
||||
"current_run": self.currentRun,
|
||||
"current_day": self.todayString,
|
||||
"data": self.runs
|
||||
}
|
||||
json.dump(d, f, indent=4)
|
||||
except Exception as e:
|
||||
print(e)
|
||||
638
meshifyDrivers/piflow/runtimestats.ipynb
Normal file
638
meshifyDrivers/piflow/runtimestats.ipynb
Normal file
@@ -0,0 +1,638 @@
|
||||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 1,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from datetime import datetime as dt\n",
|
||||
"from datetime import timedelta as td\n",
|
||||
"from time import sleep\n",
|
||||
"import json\n",
|
||||
"import math"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 106,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"class RuntimeStats:\n",
|
||||
" \n",
|
||||
" def __init__(self):\n",
|
||||
" self.runs = {}\n",
|
||||
" self.currentRun = 0\n",
|
||||
" self.today = \"\"\n",
|
||||
" self.todayString = \"\"\n",
|
||||
"\n",
|
||||
" def manageTime(self):\n",
|
||||
" if self.today != dt.today():\n",
|
||||
" if self.runs[self.todayString][\"run_\" + str(self.currentRun)][\"start\"] and not self.runs[self.todayString][\"run_\" + str(self.currentRun)][\"end\"]:\n",
|
||||
" self.runs[self.todayString][\"run_\" + str(self.currentRun)][\"end\"] = dt.timestamp(dt.strptime(self.todayString + \" 23:59:59\", \"%Y-%m-%d %H:%M:%S\"))\n",
|
||||
" self.addDay()\n",
|
||||
" days = list(self.runs.keys())\n",
|
||||
" days.sort()\n",
|
||||
" while (dt.strptime(days[-1],\"%Y-%m-%d\") - dt.strptime(days[0], \"%Y-%m-%d\")).days > 30:\n",
|
||||
" self.removeDay(day=days[0])\n",
|
||||
" days = list(self.runs.keys())\n",
|
||||
" days.sort()\n",
|
||||
"\n",
|
||||
" def addHertzDataPoint(self, frequency):\n",
|
||||
" if frequency > 0:\n",
|
||||
" self.manageTime()\n",
|
||||
" try:\n",
|
||||
" self.runs[self.todayString][\"run_\" + str(self.currentRun)][\"frequencies\"].append(frequency)\n",
|
||||
" except:\n",
|
||||
" self.runs[self.todayString][\"run_\" + str(self.currentRun)][\"frequencies\"] = [frequency]\n",
|
||||
"\n",
|
||||
" def startRun(self):\n",
|
||||
" self.runs[self.todayString][\"run_\" + str(self.currentRun)][\"start\"] = dt.timestamp(dt.now())\n",
|
||||
"\n",
|
||||
" def endRun(self):\n",
|
||||
" self.runs[self.todayString][\"run_\" + str(self.currentRun)][\"end\"] = dt.timestamp(dt.now()) \n",
|
||||
"\n",
|
||||
" def addDay(self):\n",
|
||||
" self.today = dt.today()\n",
|
||||
" self.todayString = dt.strftime(self.today, \"%Y-%m-%d\")\n",
|
||||
" self.currentRun = 1\n",
|
||||
" self.runs[self.todayString] = {}\n",
|
||||
" self.runs[self.todayString][\"run_\" + str(self.currentRun)] = {\"start\":0, \"end\": 0, \"frequencies\":[]}\n",
|
||||
"\n",
|
||||
" def countRunsDay(self, day=None):\n",
|
||||
" if not day:\n",
|
||||
" day = self.todayString\n",
|
||||
" return len(self.runs[day].keys())\n",
|
||||
"\n",
|
||||
" def countRunsMultiDay(self, numDays=30):\n",
|
||||
" total_runs = 0\n",
|
||||
" for day in list(self.runs.keys()):\n",
|
||||
" total_runs += self.countRunsDay(day=day)\n",
|
||||
" return total_runs\n",
|
||||
"\n",
|
||||
" def calculateAverageHertzDay(self, day=None, returnArray=False):\n",
|
||||
" dayFrequencies = []\n",
|
||||
" if not day:\n",
|
||||
" day = self.todayString\n",
|
||||
" for run in list(self.runs[day].keys()):\n",
|
||||
" try:\n",
|
||||
" dayFrequencies += self.runs[day][run][\"frequencies\"]\n",
|
||||
" except Exception as e:\n",
|
||||
" print(\"{} missing frequency data for {}\".format(day,run))\n",
|
||||
" if returnArray:\n",
|
||||
" return dayFrequencies\n",
|
||||
" return round(math.fsum(dayFrequencies)/len(dayFrequencies),2)\n",
|
||||
"\n",
|
||||
" def calculateAverageHertzMultiDay(self, numDays=30):\n",
|
||||
" frequencies = []\n",
|
||||
" for day in list(self.runs.keys()):\n",
|
||||
" try:\n",
|
||||
" frequencies += self.calculateAverageHertzDay(day=day, returnArray=True)\n",
|
||||
" except Exception as e:\n",
|
||||
" print(\"{} missing frequency data\".format(day))\n",
|
||||
" return round(math.fsum(frequencies)/len(frequencies), 2)\n",
|
||||
" \n",
|
||||
" def calculateRunTimeDay(self, day=None, convertToHours=True):\n",
|
||||
" total_time = 0\n",
|
||||
" if not day:\n",
|
||||
" day = self.todayString\n",
|
||||
" for run in list(self.runs[day].keys()):\n",
|
||||
" total_time = self.runs[day][run][\"end\"] - self.runs[day][run][\"start\"] + total_time\n",
|
||||
" if convertToHours:\n",
|
||||
" return RuntimeStats.convertSecondstoHours(total_time)\n",
|
||||
" return total_time\n",
|
||||
"\n",
|
||||
" def calculateRunTimeMultiDay(self, numDays=30, convertToHours=True):\n",
|
||||
" total_time = 0\n",
|
||||
" for day in list(self.runs.keys()):\n",
|
||||
" total_time += self.calculateRunTimeDay(day=day, convertToHours=False)\n",
|
||||
" if convertToHours:\n",
|
||||
" return RuntimeStats.convertSecondstoHours(total_time)\n",
|
||||
" return total_time\n",
|
||||
" \n",
|
||||
" def calculateRunPercentDay(self, day=None, precise=False):\n",
|
||||
" if not day:\n",
|
||||
" day = self.todayString\n",
|
||||
" if precise:\n",
|
||||
" return (self.calculateRunTimeDay(day=day)/24) * 100\n",
|
||||
" return round((self.calculateRunTimeDay(day=day)/24) * 100, 2)\n",
|
||||
" \n",
|
||||
"\n",
|
||||
" def calculateRunPercentMultiDay(self, numDays=30, precise=False):\n",
|
||||
" if precise:\n",
|
||||
" return (self.calculateRunTimeMultiDay()/(24*numDays)) * 100\n",
|
||||
" return round((self.calculateRunTimeMultiDay()/(24*numDays)) * 100,2)\n",
|
||||
"\n",
|
||||
" def removeDay(self, day=None):\n",
|
||||
" if not day:\n",
|
||||
" raise Exception(\"Day can not be None\")\n",
|
||||
" print(\"removing day {}\".format(day))\n",
|
||||
" del self.runs[day]\n",
|
||||
" \n",
|
||||
" def convertSecondstoHours(seconds):\n",
|
||||
" return round(seconds / (60*60),2)\n",
|
||||
"\n",
|
||||
" def loadDataFromFile(self, filePath=\"../runtimestats.json\"):\n",
|
||||
" try:\n",
|
||||
" with open(filePath, \"r\") as f:\n",
|
||||
" temp = json.load(f)\n",
|
||||
" self.runs = temp[\"data\"]\n",
|
||||
" self.currentRun = temp[\"current_run\"]\n",
|
||||
" self.today = dt.strptime(temp[\"current_day\"], \"%Y-%m-%d\")\n",
|
||||
" self.todayString = temp[\"current_day\"]\n",
|
||||
" self.manageTime()\n",
|
||||
" except FileExistsError:\n",
|
||||
" print(\"Could not find file at {}\".format(filePath))\n",
|
||||
" except FileNotFoundError:\n",
|
||||
" print(\"Could not find file at {}\".format(filePath))\n",
|
||||
" print(\"creating file\")\n",
|
||||
" try:\n",
|
||||
" with open(filePath, \"w\") as f:\n",
|
||||
" d = {\n",
|
||||
" \"current_run\": self.currentRun,\n",
|
||||
" \"current_day\": self.todayString,\n",
|
||||
" \"data\": self.runs\n",
|
||||
" }\n",
|
||||
" json.dump(d, f, indent=4)\n",
|
||||
" except Exception as e:\n",
|
||||
" print(e)\n",
|
||||
" except Exception as e:\n",
|
||||
" print(e)\n",
|
||||
"\n",
|
||||
" def saveDataToFile(self, filePath=\"../runtimestats.json\"):\n",
|
||||
" try:\n",
|
||||
" print(\"Saving Runs\")\n",
|
||||
" with open(filePath, \"w+\") as f:\n",
|
||||
" d = {\n",
|
||||
" \"current_run\": self.currentRun,\n",
|
||||
" \"current_day\": self.todayString,\n",
|
||||
" \"data\": self.runs\n",
|
||||
" }\n",
|
||||
" json.dump(d, f, indent=4)\n",
|
||||
" except FileNotFoundError:\n",
|
||||
" print(\"Could not find file at {}\".format(filePath))\n",
|
||||
" except Exception as e:\n",
|
||||
" print(e)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 107,
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"{}\n",
|
||||
"{'2023-01-11': {'run_1': {'start': 1673465959.694776, 'frequencies': [67, 65, 59, 62, 100], 'end': 1673475545.313309}, 'run_2': {'start': 1673469145.271416, 'end': 1673469136.691883, 'frequencies': [100, 99, 98, 87, 56, 56, 58, 67]}}, '2023-01-10': {'run_1': {'start': 1673465959.694776, 'frequencies': [67, 65, 59, 62], 'end': 1673469136.691883}, 'run_2': {'start': 1673469145.271416, 'end': 1673469136.691883}}, '2023-01-09': {'run_1': {'start': 1673465959.694776, 'frequencies': [67, 65, 59, 62], 'end': 1673469136.691883}, 'run_2': {'start': 1673469145.271416, 'end': 1673469136.691883}}, '2022-12-17': {'run_1': {'start': 0, 'end': 0, 'frequencies': []}}}\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"rts = RuntimeStats()\n",
|
||||
"print(rts.runs)\n",
|
||||
"path = \"/Users/nico/Documents/test/runtimestats.json\"\n",
|
||||
"rts.loadDataFromFile(filePath=path)\n",
|
||||
"print(rts.runs)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 108,
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"removing day 2022-12-17\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"rts.manageTime()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 109,
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"data": {
|
||||
"text/plain": [
|
||||
"{'2023-01-11': {'run_1': {'start': 1673465959.694776,\n",
|
||||
" 'frequencies': [67, 65, 59, 62, 100],\n",
|
||||
" 'end': 1673475545.313309},\n",
|
||||
" 'run_2': {'start': 1673469145.271416,\n",
|
||||
" 'end': 1673469136.691883,\n",
|
||||
" 'frequencies': [100, 99, 98, 87, 56, 56, 58, 67]}},\n",
|
||||
" '2023-01-10': {'run_1': {'start': 1673465959.694776,\n",
|
||||
" 'frequencies': [67, 65, 59, 62],\n",
|
||||
" 'end': 1673469136.691883},\n",
|
||||
" 'run_2': {'start': 1673469145.271416, 'end': 1673469136.691883}},\n",
|
||||
" '2023-01-09': {'run_1': {'start': 1673465959.694776,\n",
|
||||
" 'frequencies': [67, 65, 59, 62],\n",
|
||||
" 'end': 1673469136.691883},\n",
|
||||
" 'run_2': {'start': 1673469145.271416, 'end': 1673469136.691883}},\n",
|
||||
" '2023-01-17': {'run_1': {'start': 0, 'end': 0, 'frequencies': []}}}"
|
||||
]
|
||||
},
|
||||
"execution_count": 109,
|
||||
"metadata": {},
|
||||
"output_type": "execute_result"
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"rts.runs"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"rts.endRun()\n",
|
||||
"print(rts.runs)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"rts.saveDataToFile(filePath=path)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"rts.startRun()\n",
|
||||
"print(rts.runs)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"rts.countRunsDay()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 30,
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"data": {
|
||||
"text/plain": [
|
||||
"2.66"
|
||||
]
|
||||
},
|
||||
"execution_count": 30,
|
||||
"metadata": {},
|
||||
"output_type": "execute_result"
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"rts.calculateRunTimeDay(day=\"2023-1-11\")"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 31,
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"data": {
|
||||
"text/plain": [
|
||||
"11.08"
|
||||
]
|
||||
},
|
||||
"execution_count": 31,
|
||||
"metadata": {},
|
||||
"output_type": "execute_result"
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"rts.calculateRunPercentDay(day=\"2023-1-11\")"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 32,
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"data": {
|
||||
"text/plain": [
|
||||
"0.61"
|
||||
]
|
||||
},
|
||||
"execution_count": 32,
|
||||
"metadata": {},
|
||||
"output_type": "execute_result"
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"rts.calculateRunPercentMultiDay()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 33,
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"data": {
|
||||
"text/plain": [
|
||||
"4.42"
|
||||
]
|
||||
},
|
||||
"execution_count": 33,
|
||||
"metadata": {},
|
||||
"output_type": "execute_result"
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"rts.calculateRunTimeMultiDay()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 34,
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"ename": "KeyError",
|
||||
"evalue": "'2023-1-17'",
|
||||
"output_type": "error",
|
||||
"traceback": [
|
||||
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
|
||||
"\u001b[0;31mKeyError\u001b[0m Traceback (most recent call last)",
|
||||
"\u001b[1;32m/Users/nico/Documents/GitHub/HenryPump-Drivers/piflow/runtimestats.ipynb Cell 12\u001b[0m in \u001b[0;36mRuntimeStats.addHertzDataPoint\u001b[0;34m(self, frequency)\u001b[0m\n\u001b[1;32m <a href='vscode-notebook-cell:/Users/nico/Documents/GitHub/HenryPump-Drivers/piflow/runtimestats.ipynb#X13sZmlsZQ%3D%3D?line=9'>10</a>\u001b[0m \u001b[39mtry\u001b[39;00m:\n\u001b[0;32m---> <a href='vscode-notebook-cell:/Users/nico/Documents/GitHub/HenryPump-Drivers/piflow/runtimestats.ipynb#X13sZmlsZQ%3D%3D?line=10'>11</a>\u001b[0m \u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49mruns[\u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49mtodayString][\u001b[39m\"\u001b[39m\u001b[39mrun_\u001b[39m\u001b[39m\"\u001b[39m \u001b[39m+\u001b[39m \u001b[39mstr\u001b[39m(\u001b[39mself\u001b[39m\u001b[39m.\u001b[39mcurrentRun)][\u001b[39m\"\u001b[39m\u001b[39mfrequencies\u001b[39m\u001b[39m\"\u001b[39m]\u001b[39m.\u001b[39mappend(frequency)\n\u001b[1;32m <a href='vscode-notebook-cell:/Users/nico/Documents/GitHub/HenryPump-Drivers/piflow/runtimestats.ipynb#X13sZmlsZQ%3D%3D?line=11'>12</a>\u001b[0m \u001b[39mexcept\u001b[39;00m:\n",
|
||||
"\u001b[0;31mKeyError\u001b[0m: '2023-1-17'",
|
||||
"\nDuring handling of the above exception, another exception occurred:\n",
|
||||
"\u001b[0;31mKeyError\u001b[0m Traceback (most recent call last)",
|
||||
"\u001b[1;32m/Users/nico/Documents/GitHub/HenryPump-Drivers/piflow/runtimestats.ipynb Cell 12\u001b[0m in \u001b[0;36m<cell line: 1>\u001b[0;34m()\u001b[0m\n\u001b[0;32m----> <a href='vscode-notebook-cell:/Users/nico/Documents/GitHub/HenryPump-Drivers/piflow/runtimestats.ipynb#X13sZmlsZQ%3D%3D?line=0'>1</a>\u001b[0m rts\u001b[39m.\u001b[39;49maddHertzDataPoint(\u001b[39m67\u001b[39;49m)\n",
|
||||
"\u001b[1;32m/Users/nico/Documents/GitHub/HenryPump-Drivers/piflow/runtimestats.ipynb Cell 12\u001b[0m in \u001b[0;36mRuntimeStats.addHertzDataPoint\u001b[0;34m(self, frequency)\u001b[0m\n\u001b[1;32m <a href='vscode-notebook-cell:/Users/nico/Documents/GitHub/HenryPump-Drivers/piflow/runtimestats.ipynb#X13sZmlsZQ%3D%3D?line=10'>11</a>\u001b[0m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39mruns[\u001b[39mself\u001b[39m\u001b[39m.\u001b[39mtodayString][\u001b[39m\"\u001b[39m\u001b[39mrun_\u001b[39m\u001b[39m\"\u001b[39m \u001b[39m+\u001b[39m \u001b[39mstr\u001b[39m(\u001b[39mself\u001b[39m\u001b[39m.\u001b[39mcurrentRun)][\u001b[39m\"\u001b[39m\u001b[39mfrequencies\u001b[39m\u001b[39m\"\u001b[39m]\u001b[39m.\u001b[39mappend(frequency)\n\u001b[1;32m <a href='vscode-notebook-cell:/Users/nico/Documents/GitHub/HenryPump-Drivers/piflow/runtimestats.ipynb#X13sZmlsZQ%3D%3D?line=11'>12</a>\u001b[0m \u001b[39mexcept\u001b[39;00m:\n\u001b[0;32m---> <a href='vscode-notebook-cell:/Users/nico/Documents/GitHub/HenryPump-Drivers/piflow/runtimestats.ipynb#X13sZmlsZQ%3D%3D?line=12'>13</a>\u001b[0m \u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49mruns[\u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49mtodayString][\u001b[39m\"\u001b[39m\u001b[39mrun_\u001b[39m\u001b[39m\"\u001b[39m \u001b[39m+\u001b[39m \u001b[39mstr\u001b[39m(\u001b[39mself\u001b[39m\u001b[39m.\u001b[39mcurrentRun)][\u001b[39m\"\u001b[39m\u001b[39mfrequencies\u001b[39m\u001b[39m\"\u001b[39m] \u001b[39m=\u001b[39m [frequency]\n",
|
||||
"\u001b[0;31mKeyError\u001b[0m: '2023-1-17'"
|
||||
]
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"rts.addHertzDataPoint(67)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 18,
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"74.92\n",
|
||||
"2023-1-10 missing frequency data for run_2\n",
|
||||
"2023-1-9 missing frequency data for run_2\n",
|
||||
"70.48\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"print(rts.calculateAverageHertzDay(\"2023-1-11\"))\n",
|
||||
"print(rts.calculateAverageHertzMultiDay())"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"runs = {\"run_1\" : {}}\n",
|
||||
"runs[\"run_1\"][\"start\"] = dt.timestamp(dt.now())\n",
|
||||
"runs[\"run_1\"][\"end\"] = dt.timestamp(dt.now() + td(0,3600))\n",
|
||||
"\n",
|
||||
"runs[\"run_2\"] = {}\n",
|
||||
"runs[\"run_2\"][\"start\"] = dt.timestamp(dt.now() + td(0,3600) +td(0,3600))\n",
|
||||
"\n",
|
||||
"runs[\"run_2\"][\"end\"] = dt.timestamp(dt.now() + td(0,3600) +td(0,3600) + td(0,3600))"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"total_time = 0\n",
|
||||
"for key in list(runs.keys()):\n",
|
||||
" total_time = runs[key][\"end\"] - runs[key][\"start\"] + total_time"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"print(total_time)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"today = dt.today()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 39,
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"1673991101.567802\n",
|
||||
"1674021599.0\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"print(dt.timestamp(dt.now()))\n",
|
||||
"print(dt.timestamp(dt.strptime(rts.todayString + \" 23:59:59\", \"%Y-%m-%d %H:%M:%S\")))"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 47,
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"data": {
|
||||
"text/plain": [
|
||||
"'2023-01-17'"
|
||||
]
|
||||
},
|
||||
"execution_count": 47,
|
||||
"metadata": {},
|
||||
"output_type": "execute_result"
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"dt.strftime(dt.now(), \"%Y-%m-%d\")"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"{str(today.year)+\"-\"+str(today.month)+\"-\"+str(today.day): {\"run_1\": {\"start\": dt.timestamp(dt.now()), \"end\": dt.timestamp(dt.now()), \"hz\": [56,60,57,61,59,57,60]}}}"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"a = [1,2,4,5]\n",
|
||||
"b = [6,7,8,8,89]\n",
|
||||
"c = []\n",
|
||||
"c += a\n",
|
||||
"c += b\n",
|
||||
"print(math.fsum(c)/len(c))\n",
|
||||
"print((math.fsum(a)/len(a) + math.fsum(b)/len(b))/2)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 35,
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"works\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"t = {\"this\": \"test1\", \"that\": {\"is\": \"a bigger test\"}}\n",
|
||||
"del t[\"this\"]\n",
|
||||
"try:\n",
|
||||
" t[\"those\"]\n",
|
||||
"except:\n",
|
||||
" print(\"works\")"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 59,
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"Saving Runs\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"rts.addDay()\n",
|
||||
"rts.saveDataToFile(filePath=path)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 78,
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"2023-01-17\n",
|
||||
"2022-12-17\n",
|
||||
"31\n",
|
||||
"31\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"days = list(rts.runs.keys())\n",
|
||||
"days.sort()\n",
|
||||
"print(days[-1])\n",
|
||||
"print(days[0])\n",
|
||||
"print((dt.strptime(days[-1],\"%Y-%m-%d\") - dt.strptime(days[0], \"%Y-%m-%d\")).days)\n",
|
||||
"if (dt.strptime(days[-1],\"%Y-%m-%d\") - dt.strptime(days[0], \"%Y-%m-%d\")).days > 30:\n",
|
||||
" print((dt.strptime(days[-1],\"%Y-%m-%d\") - dt.strptime(days[0], \"%Y-%m-%d\")).days)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 110,
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"True\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"s = \"Operating in Forward;\"\n",
|
||||
"if \"Operating\" in s:\n",
|
||||
" print(True)"
|
||||
]
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "webkit",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.9.12 | packaged by conda-forge | (main, Mar 24 2022, 23:25:14) \n[Clang 12.0.1 ]"
|
||||
},
|
||||
"orig_nbformat": 4,
|
||||
"vscode": {
|
||||
"interpreter": {
|
||||
"hash": "22238595996e71d7b27448e64f75d285aa95d1182295fdd30f75905446cf0091"
|
||||
}
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 2
|
||||
}
|
||||
11
meshifyDrivers/piflow/testRTS.py
Normal file
11
meshifyDrivers/piflow/testRTS.py
Normal file
@@ -0,0 +1,11 @@
|
||||
from runtimeStats import RuntimeStats as RTS
|
||||
|
||||
|
||||
rts = RTS()
|
||||
rts.loadDataFromFile("/Users/nico/Documents/test/runtimestats.json")
|
||||
rts.startRun()
|
||||
#rts.endRun()
|
||||
rts.saveDataToFile("/Users/nico/Documents/test/runtimestats.json")
|
||||
print(rts.runs)
|
||||
|
||||
|
||||
63
meshifyDrivers/piflow/utilities.py
Normal file
63
meshifyDrivers/piflow/utilities.py
Normal file
@@ -0,0 +1,63 @@
|
||||
"""Utility functions for the driver."""
|
||||
import socket
|
||||
import struct
|
||||
import urllib
|
||||
import contextlib
|
||||
def get_private_ip_address():
|
||||
"""Find the private IP Address of the host device."""
|
||||
try:
|
||||
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
|
||||
sock.connect(("8.8.8.8", 80))
|
||||
ip_address = sock.getsockname()[0]
|
||||
sock.close()
|
||||
except Exception as e:
|
||||
return e
|
||||
|
||||
return ip_address
|
||||
|
||||
def get_public_ip_address():
|
||||
ip_address = "0.0.0.0"
|
||||
try:
|
||||
with contextlib.closing(urllib.urlopen("http://checkip.amazonaws.com")) as url:
|
||||
ip_address = url.read()
|
||||
except Exception as e:
|
||||
print("Could not resolve address: {}".format(e))
|
||||
return ip_address
|
||||
return ip_address
|
||||
|
||||
|
||||
def int_to_float16(int_to_convert):
|
||||
"""Convert integer into float16 representation."""
|
||||
bin_rep = ('0' * 16 + '{0:b}'.format(int_to_convert))[-16:]
|
||||
sign = 1.0
|
||||
if int(bin_rep[0]) == 1:
|
||||
sign = -1.0
|
||||
exponent = float(int(bin_rep[1:6], 2))
|
||||
fraction = float(int(bin_rep[6:17], 2))
|
||||
|
||||
if exponent == float(0b00000):
|
||||
return sign * 2 ** -14 * fraction / (2.0 ** 10.0)
|
||||
elif exponent == float(0b11111):
|
||||
if fraction == 0:
|
||||
return sign * float("inf")
|
||||
return float("NaN")
|
||||
frac_part = 1.0 + fraction / (2.0 ** 10.0)
|
||||
return sign * (2 ** (exponent - 15)) * frac_part
|
||||
|
||||
|
||||
def ints_to_float(int1, int2):
|
||||
"""Convert 2 registers into a floating point number."""
|
||||
mypack = struct.pack('>HH', int1, int2)
|
||||
f_unpacked = struct.unpack('>f', mypack)
|
||||
print("[{}, {}] >> {}".format(int1, int2, f_unpacked[0]))
|
||||
return f_unpacked[0]
|
||||
|
||||
|
||||
def degf_to_degc(temp_f):
|
||||
"""Convert deg F to deg C."""
|
||||
return (temp_f - 32.0) * (5.0/9.0)
|
||||
|
||||
|
||||
def degc_to_degf(temp_c):
|
||||
"""Convert deg C to deg F."""
|
||||
return temp_c * 1.8 + 32.0
|
||||
Reference in New Issue
Block a user