From a041d84a7450734d73bf808ca36fa9d2704139d5 Mon Sep 17 00:00:00 2001 From: Nico Melone Date: Fri, 20 Mar 2020 15:30:09 -0500 Subject: [PATCH] new channels for piflow/plcpond --- piflow/VFD525/.vscode/.ropeproject/config.py | 114 ++++ piflow/VFD525/.vscode/.ropeproject/objectdb | 4 + piflow/VFD525/Channel.py | 615 +++++++++++++++++++ piflow/VFD525/PiFlow.py | 443 +++++++++++++ piflow/VFD525/Tags.py | 89 +++ piflow/VFD525/config.txt | 16 + piflow/VFD525/file_logger.py | 18 + piflow/VFD525/persistence.py | 21 + piflow/VFD525/utilities.py | 71 +++ plcpond/config.txt | 5 +- plcpond/persistence.py | 21 + plcpond/plcpond.py | 35 +- 12 files changed, 1440 insertions(+), 12 deletions(-) create mode 100644 piflow/VFD525/.vscode/.ropeproject/config.py create mode 100644 piflow/VFD525/.vscode/.ropeproject/objectdb create mode 100644 piflow/VFD525/Channel.py create mode 100644 piflow/VFD525/PiFlow.py create mode 100644 piflow/VFD525/Tags.py create mode 100644 piflow/VFD525/config.txt create mode 100644 piflow/VFD525/file_logger.py create mode 100644 piflow/VFD525/persistence.py create mode 100644 piflow/VFD525/utilities.py create mode 100644 plcpond/persistence.py diff --git a/piflow/VFD525/.vscode/.ropeproject/config.py b/piflow/VFD525/.vscode/.ropeproject/config.py new file mode 100644 index 0000000..dee2d1a --- /dev/null +++ b/piflow/VFD525/.vscode/.ropeproject/config.py @@ -0,0 +1,114 @@ +# The default ``config.py`` +# flake8: noqa + + +def set_prefs(prefs): + """This function is called before opening the project""" + + # Specify which files and folders to ignore in the project. + # Changes to ignored resources are not added to the history and + # VCSs. Also they are not returned in `Project.get_files()`. + # Note that ``?`` and ``*`` match all characters but slashes. 
+ # '*.pyc': matches 'test.pyc' and 'pkg/test.pyc' + # 'mod*.pyc': matches 'test/mod1.pyc' but not 'mod/1.pyc' + # '.svn': matches 'pkg/.svn' and all of its children + # 'build/*.o': matches 'build/lib.o' but not 'build/sub/lib.o' + # 'build//*.o': matches 'build/lib.o' and 'build/sub/lib.o' + prefs['ignored_resources'] = ['*.pyc', '*~', '.ropeproject', + '.hg', '.svn', '_svn', '.git', '.tox'] + + # Specifies which files should be considered python files. It is + # useful when you have scripts inside your project. Only files + # ending with ``.py`` are considered to be python files by + # default. + # prefs['python_files'] = ['*.py'] + + # Custom source folders: By default rope searches the project + # for finding source folders (folders that should be searched + # for finding modules). You can add paths to that list. Note + # that rope guesses project source folders correctly most of the + # time; use this if you have any problems. + # The folders should be relative to project root and use '/' for + # separating folders regardless of the platform rope is running on. + # 'src/my_source_folder' for instance. + # prefs.add('source_folders', 'src') + + # You can extend python path for looking up modules + # prefs.add('python_path', '~/python/') + + # Should rope save object information or not. + prefs['save_objectdb'] = True + prefs['compress_objectdb'] = False + + # If `True`, rope analyzes each module when it is being saved. + prefs['automatic_soa'] = True + # The depth of calls to follow in static object analysis + prefs['soa_followed_calls'] = 0 + + # If `False` when running modules or unit tests "dynamic object + # analysis" is turned off. This makes them much faster. + prefs['perform_doa'] = True + + # Rope can check the validity of its object DB when running. + prefs['validate_objectdb'] = True + + # How many undos to hold? + prefs['max_history_items'] = 32 + + # Shows whether to save history across sessions. 
+ prefs['save_history'] = True + prefs['compress_history'] = False + + # Set the number spaces used for indenting. According to + # :PEP:`8`, it is best to use 4 spaces. Since most of rope's + # unit-tests use 4 spaces it is more reliable, too. + prefs['indent_size'] = 4 + + # Builtin and c-extension modules that are allowed to be imported + # and inspected by rope. + prefs['extension_modules'] = [] + + # Add all standard c-extensions to extension_modules list. + prefs['import_dynload_stdmods'] = True + + # If `True` modules with syntax errors are considered to be empty. + # The default value is `False`; When `False` syntax errors raise + # `rope.base.exceptions.ModuleSyntaxError` exception. + prefs['ignore_syntax_errors'] = False + + # If `True`, rope ignores unresolvable imports. Otherwise, they + # appear in the importing namespace. + prefs['ignore_bad_imports'] = False + + # If `True`, rope will insert new module imports as + # `from import ` by default. + prefs['prefer_module_from_imports'] = False + + # If `True`, rope will transform a comma list of imports into + # multiple separate import statements when organizing + # imports. + prefs['split_imports'] = False + + # If `True`, rope will remove all top-level import statements and + # reinsert them at the top of the module when making changes. + prefs['pull_imports_to_top'] = True + + # If `True`, rope will sort imports alphabetically by module name instead + # of alphabetically by import statement, with from imports after normal + # imports. + prefs['sort_imports_alphabetically'] = False + + # Location of implementation of + # rope.base.oi.type_hinting.interfaces.ITypeHintingFactory In general + # case, you don't have to change this value, unless you're an rope expert. 
+ # Change this value to inject you own implementations of interfaces + # listed in module rope.base.oi.type_hinting.providers.interfaces + # For example, you can add you own providers for Django Models, or disable + # the search type-hinting in a class hierarchy, etc. + prefs['type_hinting_factory'] = ( + 'rope.base.oi.type_hinting.factory.default_type_hinting_factory') + + +def project_opened(project): + """This function is called after opening the project""" + # Do whatever you like here! diff --git a/piflow/VFD525/.vscode/.ropeproject/objectdb b/piflow/VFD525/.vscode/.ropeproject/objectdb new file mode 100644 index 0000000..9022e6d --- /dev/null +++ b/piflow/VFD525/.vscode/.ropeproject/objectdb @@ -0,0 +1,4 @@ +}qUpersistence.pyq}qUloadqcrope.base.oi.memorydb +ScopeInfo +q)q}qUbuiltinqUstrq q +Uunknownq q s}q bss. \ No newline at end of file diff --git a/piflow/VFD525/Channel.py b/piflow/VFD525/Channel.py new file mode 100644 index 0000000..b86f607 --- /dev/null +++ b/piflow/VFD525/Channel.py @@ -0,0 +1,615 @@ +"""Define Meshify channel class.""" +import time +from pycomm.ab_comm.clx import Driver as ClxDriver +from pycomm.cip.cip_base import CommError, DataError +from file_logger import filelogger as log +import minimalmodbus + +minimalmodbus.BAUDRATE = 9600 +minimalmodbus.STOPBITS = 1 + +TAG_DATAERROR_SLEEPTIME = 5 + +def binarray(intval): + """Split an integer into its bits.""" + bin_string = '{0:08b}'.format(intval) + bin_arr = [i for i in bin_string] + bin_arr.reverse() + return bin_arr + + +def read_tag(addr, tag, plc_type="CLX"): + """Read a tag from the PLC.""" + direct = plc_type == "Micro800" + clx = ClxDriver() + try: + if clx.open(addr, direct_connection=direct): + try: + val = clx.read_tag(tag) + clx.close() + return val + except DataError as err: + clx.close() + time.sleep(TAG_DATAERROR_SLEEPTIME) + log.error("Data Error during readTag({}, {}): {}".format(addr, tag, err)) + except CommError: + # err = c.get_status() + clx.close() + 
log.error("Could not connect during readTag({}, {})".format(addr, tag)) + except AttributeError as err: + clx.close() + log.error("AttributeError during readTag({}, {}): \n{}".format(addr, tag, err)) + clx.close() + return False + + +def read_array(addr, tag, start, end, plc_type="CLX"): + """Read an array from the PLC.""" + direct = plc_type == "Micro800" + clx = ClxDriver() + if clx.open(addr, direct_connection=direct): + arr_vals = [] + try: + for i in range(start, end): + tag_w_index = tag + "[{}]".format(i) + val = clx.read_tag(tag_w_index) + arr_vals.append(round(val[0], 4)) + if arr_vals: + clx.close() + return arr_vals + else: + log.error("No length for {}".format(addr)) + clx.close() + return False + except Exception: + log.error("Error during readArray({}, {}, {}, {})".format(addr, tag, start, end)) + err = clx.get_status() + clx.close() + log.error(err) + clx.close() + + +def write_tag(addr, tag, val, plc_type="CLX"): + """Write a tag value to the PLC.""" + direct = plc_type == "Micro800" + clx = ClxDriver() + try: + if clx.open(addr, direct_connection=direct): + try: + initial_val = clx.read_tag(tag) + write_status = clx.write_tag(tag, val, initial_val[1]) + clx.close() + return write_status + except DataError as err: + clx_err = clx.get_status() + clx.close() + log.error("--\nDataError during writeTag({}, {}, {}, plc_type={}) -- {}\n{}\n".format(addr, tag, val, plc_type, err, clx_err)) + + except CommError as err: + clx_err = clx.get_status() + log.error("--\nCommError during write_tag({}, {}, {}, plc_type={})\n{}\n--".format(addr, tag, val, plc_type, err)) + clx.close() + return False + + +class Channel(object): + """Holds the configuration for a Meshify channel.""" + + def __init__(self, mesh_name, data_type, chg_threshold, guarantee_sec, map_=False, write_enabled=False): + """Initialize the channel.""" + self.mesh_name = mesh_name + self.data_type = data_type + self.last_value = None + self.value = None + self.last_send_time = 0 + self.chg_threshold 
= chg_threshold + self.guarantee_sec = guarantee_sec + self.map_ = map_ + self.write_enabled = write_enabled + + def __str__(self): + """Create a string for the channel.""" + return "{}\nvalue: {}, last_send_time: {}".format(self.mesh_name, self.value, self.last_send_time) + + def check(self, new_value, force_send=False): + """Check to see if the new_value needs to be stored.""" + send_needed = False + send_reason = "" + if self.data_type == 'BOOL' or self.data_type == 'STRING' or type(new_value) == str: + if self.last_send_time == 0: + send_needed = True + send_reason = "no send time" + elif self.value is None: + send_needed = True + send_reason = "no value" + elif self.value != new_value: + if self.map_: + if not self.value == self.map_[new_value]: + send_needed = True + send_reason = "value change" + else: + send_needed = True + send_reason = "value change" + elif (time.time() - self.last_send_time) > self.guarantee_sec: + send_needed = True + send_reason = "guarantee sec" + elif force_send: + send_needed = True + send_reason = "forced" + else: + if self.last_send_time == 0: + send_needed = True + send_reason = "no send time" + elif self.value is None: + send_needed = True + send_reason = "no value" + elif abs(self.value - new_value) > self.chg_threshold: + send_needed = True + send_reason = "change threshold" + elif (time.time() - self.last_send_time) > self.guarantee_sec: + send_needed = True + send_reason = "guarantee sec" + elif force_send: + send_needed = True + send_reason = "forced" + if send_needed: + self.last_value = self.value + if self.map_: + try: + self.value = self.map_[new_value] + except KeyError: + log.error("Cannot find a map value for {} in {} for {}".format(new_value, self.map_, self.mesh_name)) + self.value = new_value + else: + self.value = new_value + self.last_send_time = time.time() + log.info("Sending {} for {} - {}".format(self.value, self.mesh_name, send_reason)) + return send_needed + + def read(self): + """Read the value.""" + pass 
+ + +def identity(sent): + """Return exactly what was sent to it.""" + return sent + +def volume_units(vunit): + units = { + 0: "cm cubed/s", + 1: "cm cubed/min", + 2: "cm cubed/h", + 3: "cm cubed/d", + 4: "dm cubed/s", + 5: "dm cubed/min", + 6: "dm cubed/h", + 7: "dm cubed/d", + 8: "m cubed/s", + 9: "m cubed/min", + 10: "m cubed/h", + 11: "m cubed/d", + 12: "ml/s", + 13: "ml/min", + 14: "ml/h", + 15: "ml/d", + 16: "l/s", + 17: "l/min", + 18: "l/h (+)", + 19: "l/d", + 20: "hl/s", + 21: "hl/min", + 22: "hl/h", + 23: "hl/d", + 24: "Ml/s", + 25: "Ml/min", + 26: "Ml/h", + 27: "Ml/d", + 32: "af/s", + 33: "af/min", + 34: "af/h", + 35: "af/d", + 36: "ft cubed/s", + 37: "ft cubed/min", + 38: "ft cubed/h", + 39: "ft cubed/d", + 40: "fl oz/s (us)", + 41: "fl oz/min (us)", + 42: "fl oz/h (us)", + 43: "fl oz/d (us)", + 44: "gal/s (us)", + 45: "gal/min (us)", + 46: "gal/h (us)", + 47: "gal/d (us)", + 48: "Mgal/s (us)", + 49: "Mgal/min (us)", + 50: "Mgal/h (us)", + 51: "Mgal/d (us)", + 52: "bbl/s (us;liq.)", + 53: "bbl/min (us;liq.)", + 54: "bbl/h (us;liq.)", + 55: "bbl/d (us;liq.)", + 56: "bbl/s (us;beer)", + 57: "bbl/min (us;beer)", + 58: "bbl/h (us;beer)", + 59: "bbl/d (us;beer)", + 60: "bbl/s (us;oil)", + 61: "bbl/min (us;oil)", + 62: "bbl/h (us;oil)", + 63: "bbl/d (us;oil)", + 64: "bbl/s (us;tank)", + 65: "bbl/min (us;tank)", + 66: "bbl/h (us;tank)", + 67: "bbl/d (us;tank)", + 68: "gal/s (imp)", + 69: "gal/min (imp)", + 70: "gal/h (imp)", + 71: "gal/d (imp)", + 72: "Mgal/s (imp)", + 73: "Mgal/min (imp)", + 74: "Mgal/h (imp)", + 75: "Mgal/d (imp)", + 76: "bbl/s (imp;beer)", + 77: "bbl/min (imp;beer)", + 78: "bbl/h (imp;beer)", + 79: "bbl/d (imp;beer)", + 80: "bbl/s (imp;oil)", + 81: "bbl/min (imp;oil)", + 82: "bbl/h (imp;oil)", + 83: "bbl/d (imp;oil)", + 88: "kgal/s (us)", + 89: "kgal/min (us)", + 90: "kgal/h (us)", + 91: "kgal/d (us)", + 92: "MMft cubed/s", + 93: "MMft cubed/min", + 94: "MMft cubed/h", + 96: "Mft cubed/d" + } + return units[vunit] + +def 
totalizer_units(tunit): + + units = { + 0: "cm cubed", + 1: "dm cubed", + 2: "m cubed", + 3: "ml", + 4: "l", + 5: "hl", + 6: "Ml Mega", + 8: "af", + 9: "ft cubed", + 10: "fl oz (us)", + 11: "gal (us)", + 12: "Mgal (us)", + 13: "bbl (us;liq.)", + 14: "bbl (us;beer)", + 15: "bbl (us;oil)", + 16: "bbl (us;tank)", + 17: "gal (imp)", + 18: "Mgal (imp)", + 19: "bbl (imp;beer)", + 20: "bbl (imp;oil)", + 22: "kgal (us)", + 23: "Mft cubed", + 50: "g", + 51: "kg", + 52: "t", + 53: "oz", + 54: "lb", + 55: "STon", + 100: "Nl", + 101: "Nm cubed", + 102: "Sm cubed", + 103: "Sft cubed", + 104: "Sl", + 105: "Sgal (us)", + 106: "Sbbl (us;liq.)", + 107: "Sgal (imp)", + 108: "Sbbl (us;oil)", + 109: "MMSft cubed", + 110: "Nhl", + 251: "None" + } + return units[tunit] + +def int_to_bits(n,x): + return pad_to_x([int(digit) for digit in bin(n)[2:]],x) # [2:] to chop off the "0b" part + +def pad_to_x(n,x): + while len(n) < x: + n = [0] + n + return n + +def status_codes(n): + + status_array = int_to_bits(n,16) + status_low = { + 0: "Stopped;", + 1: "Operating in Forward;", + 2: "Operating in Reverse;", + 3: "DC operating;" + } + status_mid = { + 0: "", + 1: "Speed searching;", + 2: "Accelerating;", + 3: "At constant speed;", + 4: "Decelerating;", + 5: "Decelerating to stop;", + 6: "H/W OCS;", + 7: "S/W OCS;", + 8: "Dwell operating;" + } + status_high = { + 0: "Normal state", + 4: "Warning occurred", + 8: "Fault occurred" + } + values = { + 0: 8, + 1: 4, + 2: 2, + 3: 1 + } + + stats_low = status_array[12:] + stats_mid = status_array[8:12] + stats_high = status_array[:4] + low = 0 + mid = 0 + high = 0 + for x in range(4): + if stats_low[x] == 1: + low = low + values[x] + if stats_mid[x] == 1: + mid = mid + values[x] + if stats_high[x] == 1: + high = high + values[x] + + return status_low[low] + " " + status_mid[mid] + ' ' + status_high[high] + +def fault_code_a(n): + + fault_code_array = int_to_bits(n,16) + + """ fault = { + 0: "OCT", + 1: "OVT", + 2: "EXT-A", + 3: "EST", + 4: "COL", + 5: 
"GFT", + 6: "OHT", + 7: "ETH", + 8: "OLT", + 9: "Reserved", + 10: "EXT-B", + 11: "EEP", + 12: "FAN", + 13: "POT", + 14: "IOLT", + 15: "LVT" + } """ + fault = { + 0: "Overload Trip", + 1: "Underload Trip", + 2: "Inverter Overload Trip", + 3: "E-Thermal Trip", + 4: "Ground Fault Trip", + 5: "Output Image Trip", + 6: "Inmput Imaging Trip", + 7: "Reserved", + 8: "Reserved", + 9: "NTC Trip", + 10: "Overcurrent Trip", + 11: "Overvoltage Trip", + 12: "External Trip", + 13: "Arm Short", + 14: "Over Heat Trip", + 15: "Fuse Open Trip" + } + + faults = [] + counter = 15 + for x in range(16): + if fault_code_array[x] == 1: + faults = [fault[counter]] + faults + counter = counter - 1 + return ' '.join(faults) + +def fault_code_b(n): + + fault_code_array = int_to_bits(n,8) + + """ fault = { + 0: "COM", + 1: "Reserved", + 2: "NTC", + 3: "REEP", + 4: "OC2", + 5: "NBR", + 6: "SAFA", + 7: "SAFB" + } """ + fault = { + 0: "Reserved", + 1: "Reserved", + 2: "Reserved", + 3: "FAN Trip", + 4: "Reserved", + 5: "Reserved", + 6: "Pre PID Fail", + 7: "Bad contact at basic I/O board", + 8: "External Brake Trip", + 9: "No Motor Trip", + 10: "Bad Option Card", + 11: "Reserved", + 12: "Reserved", + 13: "Reserved", + 14: "Pre Over Heat Trip", + 15: "Reserved" + } + + faults = [] + counter = 7 + for x in range(8): + if fault_code_array[x] == 1: + faults = [fault[counter]] + faults + counter = counter - 1 + return ' '.join(faults) + +class ModbusChannel(Channel): + """Modbus channel object.""" + + def __init__(self, mesh_name, register_number, data_type, chg_threshold, guarantee_sec, channel_size=1, map_=False, write_enabled=False, transform_fn=identity, unit_number=1, scaling=0): + """Initialize the channel.""" + super(ModbusChannel, self).__init__(mesh_name, data_type, chg_threshold, guarantee_sec, map_, write_enabled) + self.mesh_name = mesh_name + self.register_number = register_number + self.channel_size = channel_size + self.data_type = data_type + self.last_value = None + self.value = None + 
self.last_send_time = 0 + self.chg_threshold = chg_threshold + self.guarantee_sec = guarantee_sec + self.map_ = map_ + self.write_enabled = write_enabled + self.transform_fn = transform_fn + self.unit_number = unit_number + self.instrument = minimalmodbus.Instrument('/dev/ttyS0', self.unit_number) + self.scaling= scaling + + def read(self): + """Return the transformed read value.""" + if self.data_type == "FLOAT": + try: + read_value = self.instrument.read_float(self.register_number,4,self.channel_size) + except IOError as e: + log.info(e) + return None + + elif self.data_type == "INTEGER" or self.data_type == "STRING": + try: + read_value = self.instrument.read_register(self.register_number, self.scaling, 4) + except IOError as e: + log.info(e) + return None + read_value = self.transform_fn(read_value) + return read_value + + def write(self, value): + """Write a value to a register""" + if self.data_type == "FLOAT": + value = float(value) + elif self.data_type == "INTEGER": + value = int(value) + else: + value = str(value) + try: + self.instrument.write_register(self.register_number,value, self.scaling, 16 if self.channel_size > 1 else 6 ) + return True + except Exception as e: + log.info("Failed to write value: {}".format(e)) + return False + + +class PLCChannel(Channel): + """PLC Channel Object.""" + + def __init__(self, ip, mesh_name, plc_tag, data_type, chg_threshold, guarantee_sec, map_=False, write_enabled=False, plc_type='CLX'): + """Initialize the channel.""" + super(PLCChannel, self).__init__(mesh_name, data_type, chg_threshold, guarantee_sec, map_, write_enabled) + self.plc_ip = ip + self.mesh_name = mesh_name + self.plc_tag = plc_tag + self.data_type = data_type + self.last_value = None + self.value = None + self.last_send_time = 0 + self.chg_threshold = chg_threshold + self.guarantee_sec = guarantee_sec + self.map_ = map_ + self.write_enabled = write_enabled + self.plc_type = plc_type + + def read(self): + """Read the value.""" + plc_value = None + if 
self.plc_tag and self.plc_ip: + read_value = read_tag(self.plc_ip, self.plc_tag, plc_type=self.plc_type) + if read_value: + plc_value = read_value[0] + + return plc_value + + +class BoolArrayChannels(Channel): + """Hold the configuration for a set of boolean array channels.""" + + def __init__(self, ip, mesh_name, plc_tag, data_type, chg_threshold, guarantee_sec, map_=False, write_enabled=False): + """Initialize the channel.""" + super(BoolArrayChannels, self).__init__(mesh_name, data_type, chg_threshold, guarantee_sec, map_, write_enabled) + self.plc_ip = ip + self.mesh_name = mesh_name + self.plc_tag = plc_tag + self.data_type = data_type + self.last_value = None + self.value = None + self.last_send_time = 0 + self.chg_threshold = chg_threshold + self.guarantee_sec = guarantee_sec + self.map_ = map_ + self.write_enabled = write_enabled + + def compare_values(self, new_val_dict): + """Compare new values to old values to see if the values need storing.""" + send = False + for idx in new_val_dict: + try: + if new_val_dict[idx] != self.last_value[idx]: + send = True + except KeyError: + log.error("Key Error in self.compare_values for index {}".format(idx)) + send = True + return send + + def read(self, force_send=False): + """Read the value and check to see if needs to be stored.""" + send_needed = False + send_reason = "" + if self.plc_tag: + val = read_tag(self.plc_ip, self.plc_tag) + if val: + bool_arr = binarray(val[0]) + new_val = {} + for idx in self.map_: + try: + new_val[self.map_[idx]] = bool_arr[idx] + except KeyError: + log.error("Not able to get value for index {}".format(idx)) + + if self.last_send_time == 0: + send_needed = True + send_reason = "no send time" + elif self.value is None: + send_needed = True + send_reason = "no value" + elif self.compare_values(new_val): + send_needed = True + send_reason = "value change" + elif (time.time() - self.last_send_time) > self.guarantee_sec: + send_needed = True + send_reason = "guarantee sec" + elif 
force_send: + send_needed = True + send_reason = "forced" + + if send_needed: + self.value = new_val + self.last_value = self.value + self.last_send_time = time.time() + log.info("Sending {} for {} - {}".format(self.value, self.mesh_name, send_reason)) + return send_needed \ No newline at end of file diff --git a/piflow/VFD525/PiFlow.py b/piflow/VFD525/PiFlow.py new file mode 100644 index 0000000..2d93c87 --- /dev/null +++ b/piflow/VFD525/PiFlow.py @@ -0,0 +1,443 @@ +"""Driver for PiFlow""" +import os +import threading +import json +import time +from random import randint +from datetime import datetime as dt +from device_base import deviceBase +import persistence +from utilities import get_public_ip_address, get_private_ip_address +from file_logger import filelogger as log + +_ = None +os.system('sudo timedatectl set-timezone America/Chicago') +log.info("PiFlow startup") + +# GLOBAL VARIABLES +WAIT_FOR_CONNECTION_SECONDS = 5 +IP_CHECK_PERIOD = 60 + + +# PERSISTENCE FILE +PERSIST = persistence.load('persist.json') +if not PERSIST: + PERSIST = { + 'flowmeter': 247, + 'drive': 1, + 'isVFD': False, + 'drive_enabled': True, + 'plc_ip': '192.168.1.12', + 'yesterday_totalizer_1': dt.today().day, + 'yesterday_totalizer_2': dt.today().day, + 'yesterday_totalizer_3': dt.today().day, + 'yesterday_total_totalizer_1': 0, + 'yesterday_total_midnight_totalizer_1': 0, + 'yesterday_total_totalizer_2': 0, + 'yesterday_total_midnight_totalizer_2': 0, + 'yesterday_total_totalizer_3': 0, + 'yesterday_total_midnight_totalizer_3': 0 + } + persistence.store(PERSIST, 'persist.json') + +drive_enabled = PERSIST['drive_enabled'] +try: + isVFD = PERSIST['isVFD'] +except: + PERSIST['isVFD'] = False + persistence.store(PERSIST) + +try: + isVFD = PERSIST['plc_ip'] +except: + PERSIST['plc_ip'] = '192.168.1.12' + persistence.store(PERSIST) + +from Tags import tags + +CHANNELS = tags + +class start(threading.Thread, deviceBase): + """Start class required by Meshify.""" + + def __init__(self, 
name=None, number=None, mac=None, Q=None, mcu=None, + companyId=None, offset=None, mqtt=None, Nodes=None): + """Initialize the driver.""" + threading.Thread.__init__(self) + deviceBase.__init__(self, name=name, number=number, mac=mac, Q=Q, + mcu=mcu, companyId=companyId, offset=offset, + mqtt=mqtt, Nodes=Nodes) + + self.daemon = True + self.version = "21" + self.finished = threading.Event() + self.force_send = False + self.public_ip_address = "" + self.private_ip_address = "" + self.public_ip_address_last_checked = 0 + self.status = "" + self.alarm = "" + threading.Thread.start(self) + + # this is a required function for all drivers, its goal is to upload some piece of data + # about your device so it can be seen on the web + def register(self): + """Register the driver.""" + # self.sendtodb("log", "BOOM! Booted.", 0) + pass + + def run(self): + """Actually run the driver.""" + for i in range(0, WAIT_FOR_CONNECTION_SECONDS): + print("PiFlow driver will start in {} seconds".format(WAIT_FOR_CONNECTION_SECONDS - i)) + time.sleep(1) + log.info("BOOM! 
Starting PiFlow driver...") + + #self._check_watchdog() + self._check_ip_address() + + self.nodes["PiFlow_0199"] = self + + send_loops = 0 + + while True: + now = time.time() + if self.force_send: + log.warning("FORCE SEND: TRUE") + if isVFD: + status = {} + for chan in CHANNELS[:24]: #build status/alarm strings + try: + val = chan.read() + chan.check(val, self.force_send) + status[chan.mesh_name] = chan.value + except Exception as e: + log.warning("An error occured in status check: {}".format(e)) + try: + self.sendStatus(status) + except Exception as e: + log.warning("An error occured in send status: {}".format(e)) + for chan in CHANNELS[24:]: + try: + val = chan.read() + if chan.mesh_name in ['totalizer_1','totalizer_2','totalizer_3']: + right_now = dt.today() + today_total, yesterday_total = self.totalize(val, PERSIST['yesterday_'+chan.mesh_name], right_now.day, right_now.hour, right_now.minute, PERSIST['yesterday_total_midnight_'+chan.mesh_name], PERSIST['yesterday_total_'+chan.mesh_name], chan.mesh_name) + if chan.check(val, self.force_send): + self.sendtodbDev(1, chan.mesh_name, chan.value, 0, 'PiFlow') + self.sendtodbDev(1,"today_"+chan.mesh_name, today_total,0,'PiFlow') + self.sendtodbDev(1,"yesterday_"+chan.mesh_name, yesterday_total,0,'PiFlow') + self.sendtodbDev(1, chan.mesh_name + "_units", "BBL",0,'PiFlow') + else: + if chan.check(val, self.force_send): + self.sendtodbDev(1, chan.mesh_name, chan.value, 0, 'PiFlow') + except Exception as e: + log.warning("An error occured in data collection: {}".format(e)) + else: + for chan in CHANNELS: + try: + for x in range(3): + val = chan.read() + if not val == None: + break + if val == None: + log.info("No modbus read sending previous value") + val = chan.value + if chan.mesh_name in ['totalizer_1','totalizer_2','totalizer_3']: + right_now = dt.today() + today_total, yesterday_total = self.totalize(val, PERSIST['yesterday_'+chan.mesh_name], right_now.day, right_now.hour, right_now.minute, 
PERSIST['yesterday_total_midnight_'+chan.mesh_name], PERSIST['yesterday_total_'+chan.mesh_name], chan.mesh_name) + if chan.check(val, self.force_send): + self.sendtodbDev(1, chan.mesh_name, chan.value, 0, 'PiFlow') + self.sendtodbDev(1,"today_"+chan.mesh_name, today_total,0,'PiFlow') + self.sendtodbDev(1,"yesterday_"+chan.mesh_name, yesterday_total,0,'PiFlow') + else: + if chan.check(val, self.force_send): + self.sendtodbDev(1, chan.mesh_name, chan.value, 0, 'PiFlow') + + except Exception as e: + log.warning("An error occured: {}".format(e)) + time.sleep(3) + + + # print("PiFlow driver still alive...") + if self.force_send: + if send_loops > 2: + log.warning("Turning off force_send") + self.force_send = False + send_loops = 0 + else: + send_loops += 1 + + + if (now - self.public_ip_address_last_checked) > IP_CHECK_PERIOD: + self._check_ip_address() + time.sleep(10) + + def _check_ip_address(self): + """Check the public IP address and send to Meshify if changed.""" + self.public_ip_address_last_checked = time.time() + test_public_ip = get_public_ip_address() + test_public_ip = test_public_ip[:-1] + test_private_ip = get_private_ip_address() + if not test_public_ip == self.public_ip_address and not test_public_ip == "0.0.0.0": + self.sendtodbDev(1, 'public_ip_address', test_public_ip, 0, 'PiFlow') + self.public_ip_address = test_public_ip + if not test_private_ip == self.private_ip_address: + self.sendtodbDev(1, 'private_ip_address', test_private_ip, 0, 'PiFlow') + self.private_ip_address = test_private_ip + + def PiFlow_sync(self, name, value): + """Sync all data from the driver.""" + self.force_send = True + # self.sendtodb("log", "synced", 0) + return True + + def PiFlow_flowmeternumber(self, name, unit_number): + """Change the unit number for the PiFlow flow meter""" + unit_number = int(unit_number) + if drive_enabled: + for chan in CHANNELS[0:8]: + chan.unit_number = unit_number + PERSIST['flowmeter'] = unit_number + persistence.store(PERSIST, 'persist.json') + 
return True + else: + for chan in CHANNELS: + chan.unit_number = unit_number + PERSIST['flowmeter'] = unit_number + persistence.store(PERSIST, 'persist.json') + self.sendtodbDev(1, 'flowmeternumber', unit_number, 0,'PiFlow') + return True + return False + + def PiFlow_drivenumber(self, name, unit_number): + """Change the unit number for the PiFlow drive""" + unit_number = int(unit_number) + for chan in CHANNELS[8:]: + chan.unit_number = unit_number + + PERSIST['drive'] = unit_number + persistence.store(PERSIST, 'persist.json') + self.sendtodbDev(1, 'drivenumber', unit_number, 0,'PiFlow') + return True + + def PiFlow_reboot(self, name, value): + os.system('reboot') + return True + + def PiFlow_drive_enabled(self, name, value): + value = int(value) + if value == 1: + PERSIST['drive_enabled'] = True + else: + PERSIST['drive_enabled'] = False + + persistence.store(PERSIST, 'persist.json') + self.sendtodbDev(1, 'drive_enabled', value, 0,'PiFlow') + return True + + def PiFlow_write(self, name, value): + """Write a value to the device via modbus""" + new_val = json.loads(str(value).replace("'", '"')) + addr_n = int(new_val['addr']) + reg_n = int(new_val['reg']) + val_n = new_val['val'] + for chan in CHANNELS: + if chan.unit_number == addr_n and chan.register_number == reg_n: + write_res = chan.write(val_n) + + log.info("Result of PiFlow_write(self, {}, {}) = {}".format(name, value, write_res)) + return write_res + + def totalize(self,val, yesterday, day, hour, minute, yesterday_total_midnight, yesterday_total,channel): + if (yesterday_total == 0 and yesterday_total_midnight == 0) or (yesterday_total == None or yesterday_total_midnight == None): + yesterday_total_midnight = val + PERSIST['yesterday_total_midnight_'+channel] = yesterday_total_midnight + persistence.store(PERSIST, 'persist.json') + today_total = val - yesterday_total_midnight + if hour == 0 and minute == 0 and not(day == yesterday): + yesterday_total = today_total + yesterday_total_midnight = val + 
today_total = val - yesterday_total_midnight + yesterday = day + PERSIST['yesterday_'+channel] = yesterday + PERSIST['yesterday_total_'+channel] = yesterday_total + PERSIST['yesterday_total_midnight_'+channel] = yesterday_total_midnight + persistence.store(PERSIST,'persist.json') + + return today_total,yesterday_total + + def sendStatus(self,status): + status_string = "" + + fault_codes = { + 0: "", + 2: "Auxiliary Input", + 3: "Power Loss", + 4: "UnderVoltage", + 5: "OverVoltage", + 7: "Motor Overload", + 8: "Heatsink OvrTemp", + 9: "Thermister OvrTemp", + 10: "DynBrake OverTemp", + 12: "HW OverCurrent", + 13: "Ground Fault", + 14: "Ground Warning", + 15: "Load Loss", + 17: "Input Phase Loss", + 18: "Motor PTC Trip", + 19: "Task Overrun", + 20: "TorqPrv Spd Band", + 21: "Output PhaseLoss", + 24: "Decel Inhibit", + 25: "OverSpeed Limit", + 26: "Brake Slipped", + 27: "Torq Prove Cflct", + 28: "TP Encls Config", + 29: "Analog In Loss", + 33: "AuRsts Exhausted", + 35: "IPM OverCurrent", + 36: "SW OverCurrent", + 38: "Phase U to Grnd", + 39: "Phase V to Grnd", + 40: "Phase W to Grnd", + 41: "Phase UV Short", + 42: "Phase VW Short", + 43: "Phase WU Short", + 44: "Phase UNegToGrnd", + 45: "Phase VNegToGrnd", + 46: "Phase WNegToGrnd", + 48: "System Defaulted", + 49: "Drive Powerup", + 51: "Clr Fault Queue", + 55: "Ctrl Bd Overtemp", + 59: "Invalid Code", + 61: "Shear Pin 1", + 62: "Shear Pin 2", + 64: "Drive Overload", + 67: "Pump Off", + 71: "Port 1 Adapter", + 72: "Port 2 Adapter", + 73: "Port 3 Adapter", + 74: "Port 4 Adapter", + 75: "Port 5 Adapter", + 76: "Port 6 Adapter", + 77: "IR Volts Range", + 78: "FluxAmpsRef Rang", + 79: "Excessive Load", + 80: "AutoTune Aborted", + 81: "Port 1 DPI Loss", + 82: "Port 2 DPI Loss", + 83: "Port 3 DPI Loss", + 84: "Port 4 DPI Loss", + 85: "Port 5 DPI Loss", + 86: "Port 6 DPI Loss", + 87: "Ixo VoltageRange", + 91: "Pri VelFdbk Loss", + 93: "Hw Enable Check", + 94: "Alt VelFdbk Loss", + 95: "Aux VelFdbk Loss", + 96: 
"PositionFdbkLoss", + 97: "Auto Tach Switch", + 100: "Parameter Chksum", + 101: "PwrDn NVS Blank", + 102: "NVS Not Blank", + 103: "PwrDn Nvs Incomp", + 104: "Pwr Brd Checksum", + 106: "Incompat MCB-PB", + 107: "Replaced MCB-PB", + 108: "Anlg Cal Chksum", + 110: "Ivld Pwr Bd Data", + 111: "PwrBd Invalid ID", + 112: "PwrBd App MinVer", + 113: "Tracking DataErr", + 115: "PwrDn Table Full", + 116: "PwrDnEntry2Large", + 117: "PwrDn Data Chksm", + 118: "PwrBd PwrDn Chks", + 124: "App ID Changed", + 125: "Using Backup App", + 134: "Start on PowerUp", + 137: "Ext Prechrg Err", + 138: "Precharge Open", + 141: "Autn Enc Angle", + 142: "Autn Spd Rstrct", + 143: "AutoTune CurReg", + 144: "AutoTune Inertia", + 145: "AutoTune Travel", + 13037: "Net IO Timeout" + } + + if status['vfd_active'] == "Stopped": + status_string = status_string + status['vfd_active'] + "; " + status['vfd_ready'] + else: + status_string = status_string + status['vfd_active'] + if status['vfd_rev']: + status_string = status_string + '; ' + status['vfd_rev'] + if status['vfd_fwd']: + status_string = status_string + '; ' + status['vfd_fwd'] + if status['vfd_atreference']: + status_string = status_string + '; ' + status['vfd_atreference'] + alarm_string = "" + if status['vfd_faulted'] == "Drive Faulted": + status_string = status_string + '; ' + status['vfd_faulted'] + if status['vfd_commloss']: + alarm_string = alarm_string + '; ' + status['vfd_commloss'] + if status['vfd_fbkalarm']: + alarm_string = alarm_string + '; ' + status['vfd_fbkalarm'] + if status['vfd_faultcode']: + alarm_string = alarm_string + '; ' + "Fault: {} Fault code: {}".format(fault_codes[status['vfd_faultcode']],str(status['vfd_faultcode'])) + if status['minspeedalarm']: + alarm_string = alarm_string + '; ' + status['minspeedalarm'] + if status['pumpedoff']: + alarm_string = alarm_string + '; ' + status['pumpedoff'] + if status['lockedout']: + alarm_string = alarm_string + '; ' + status['lockedout'] + if status['tubingpressurehi']: + 
alarm_string = alarm_string + '; ' + status['tubingpressurehi'] + if status['tubingpressurehihi']: + alarm_string = alarm_string + '; ' + status['tubingpressurehihi'] + if status['tubingpressurelo']: + alarm_string = alarm_string + '; ' + status['tubingpressurelo'] + if status['tubingpressurelolo']: + alarm_string = alarm_string + '; ' + status['tubingpressurelolo'] + if status['flowmeterhihi']: + alarm_string = alarm_string + '; ' + status['flowmeterhihi'] + if status['flowmeterhi']: + alarm_string = alarm_string + '; ' + status['flowmeterhi'] + if status['flowmeterlolo']: + alarm_string = alarm_string + '; ' + status['flowmeterlolo'] + if status['flowmeterlo']: + alarm_string = alarm_string + '; ' + status['flowmeterlo'] + if status['fluidlevellolo']: + alarm_string = alarm_string + '; ' + status['fluidlevellolo'] + if status['fluidlevello']: + alarm_string = alarm_string + '; ' + status['fluidlevello'] + if status['fluidlevelhi']: + alarm_string = alarm_string + '; ' + status['fluidlevelhi'] + if status['fluidlevelhihi']: + alarm_string = alarm_string + '; ' + status['fluidlevelhihi'] + try: + if status_string and status_string.startswith('; '): + status_string = status_string[2:] + if status_string and status_string.endswith('; '): + status_string = status_string[:-2] + if alarm_string and alarm_string.startswith('; '): + alarm_string = alarm_string[2:] + if alarm_string and alarm_string.endswith('; '): + alarm_string = alarm_string[:-2] + except Exception as e: + log.warning("Error in send status semicolon: {}".format(e)) + + if self.status != status_string: + self.status = status_string + log.info("Sending {} for {}".format(status_string, 'run_status')) + self.sendtodbDev(1, 'run_status', status_string, 0, 'PiFlow') + if self.alarm != alarm_string: + self.alarm = alarm_string + log.info("Sending {} for {}".format(alarm_string, 'fault_a')) + self.sendtodbDev(1, 'fault_a', alarm_string, 0 , 'PiFlow') + + + + \ No newline at end of file diff --git a/piflow/VFD525/Tags.py
b/piflow/VFD525/Tags.py new file mode 100644 index 0000000..5d9c00b --- /dev/null +++ b/piflow/VFD525/Tags.py @@ -0,0 +1,89 @@ +from Channel import PLCChannel,Channel, ModbusChannel, status_codes, fault_code_a, fault_code_b, volume_units, totalizer_units +import persistence + +PERSIST = persistence.load('persist.json') +flowmeter_unit_number = PERSIST['flowmeter'] +drive_enabled = PERSIST['drive_enabled'] +isVFD = PERSIST['isVFD'] +if drive_enabled: + drive_unit_number = PERSIST['drive'] +try: + plc_ip = PERSIST['plc_ip'] +except KeyError: + plc_ip = PERSIST['plc_ip'] = '192.168.1.12' + persistence.store(PERSIST) +if isVFD: + tags = [ + PLCChannel(plc_ip,'vfd_atreference','sts_VFD_AtReference','BOOL',0,3600,map_={0: "", 1: "At speed"},plc_type='Micro800'), + PLCChannel(plc_ip,'vfd_rev','sts_VFD_REV','BOOL',0,3600,map_={0: "", 1: "Operating in Reverse"},plc_type='Micro800'), + PLCChannel(plc_ip,'vfd_fwd','sts_VFD_FWD','BOOL',0,3600,map_={0: "", 1: "Operating in Forward"},plc_type='Micro800'), + PLCChannel(plc_ip,'vfd_active','sts_VFD_Active','BOOL',0,3600,map_={0: "Stopped", 1: "Running"},plc_type='Micro800'), + PLCChannel(plc_ip,'vfd_ready','sts_VFD_Ready','BOOL',0,3600,map_={0: "Drive Not Ready", 1: "Drive Ready"},plc_type='Micro800'), + PLCChannel(plc_ip,'vfd_faultcode','sts_VFD_FaultCode','REAL',0,3600, plc_type='Micro800'), + PLCChannel(plc_ip,'vfd_faulted','AL0_VFD','BOOL',0,3600,map_={0: "", 1: "Drive Faulted"},plc_type='Micro800'), + PLCChannel(plc_ip,'vfd_commloss','AL0_VFDComLoss','BOOL',0,3600,map_={0: "", 1: "Drive Comms Loss"},plc_type='Micro800'), + PLCChannel(plc_ip,'vfd_fbkalarm','AL0_VFD_FBAlarm','BOOL',0,3600,map_={0: "", 1: "Drive Lost Feedback"},plc_type='Micro800'), + PLCChannel(plc_ip,'tubingpressurehi','AL0_TubingPressureHi','BOOL',0,3600,map_={0: "", 1: "High Tubing Pressure"},plc_type='Micro800'), + PLCChannel(plc_ip,'tubingpressurehihi','AL0_TubingPressureHiHi','BOOL',0,3600,map_={0: "", 1: "High High Tubing Pressure"},plc_type='Micro800'), + 
PLCChannel(plc_ip,'tubingpressurelo','AL0_TubingPressureLo','BOOL',0,3600,map_={0: "", 1: "Low Tubing Pressure"},plc_type='Micro800'), + PLCChannel(plc_ip,'tubingpressurelolo','AL0_TubingPressureLoLo','BOOL',0,3600,map_={0: "", 1: "Low Low Tubing Pressure"},plc_type='Micro800'), + PLCChannel(plc_ip,'flowmeterhihi','AL0_FlowMeterHiHi','BOOL',0,3600,map_={0: "", 1: "High High FM Flow Rate"},plc_type='Micro800'), + PLCChannel(plc_ip,'flowmeterhi','AL0_FlowMeterHi','BOOL',0,3600,map_={0: "", 1: "High FM Flow Rate"},plc_type='Micro800'), + PLCChannel(plc_ip,'flowmeterlolo','AL0_FlowMeterLoLo','BOOL',0,3600,map_={0: "", 1: "Low Low FM Flow Rate"},plc_type='Micro800'), + PLCChannel(plc_ip,'flowmeterlo','AL0_FlowMeterLo','BOOL',0,3600,map_={0: "", 1: "Low FM Flow Rate"},plc_type='Micro800'), + PLCChannel(plc_ip,'minspeedalarm','AL0_MinSpeedAlarm','BOOL',0,3600,map_={0: "", 1: "Drive not able to maintain min speed"},plc_type='Micro800'), + PLCChannel(plc_ip,'pumpedoff','AL0_PumpedOff','BOOL',0,3600,map_={0: "", 1: "Pumped Off"},plc_type='Micro800'), + PLCChannel(plc_ip,'fluidlevellolo','AL0_FluidLevelLoLo','BOOL',0,3600,map_={0: "", 1: "Low Low Fluid Level"},plc_type='Micro800'), + PLCChannel(plc_ip,'fluidlevello','AL0_FluidLevelLo','BOOL',0,3600,map_={0: "", 1: "Low Fluid Level"},plc_type='Micro800'), + PLCChannel(plc_ip,'fluidlevelhi','AL0_FluidLevelHi','BOOL',0,3600,map_={0: "", 1: "High Fluid Level"},plc_type='Micro800'), + PLCChannel(plc_ip,'fluidlevelhihi','AL0_FluidLevelHiHi','BOOL',0,3600,map_={0: "", 1: "High High Fluid Level"},plc_type='Micro800'), + PLCChannel(plc_ip,'lockedout','AlarmLockOut','BOOL',0,3600,map_={0: "", 1: "Locked Out Repeated Alarms"},plc_type='Micro800'), + PLCChannel(plc_ip,'volume_flow','Val_FlowmeterFR','REAL',5,3600,plc_type='Micro800'), + PLCChannel(plc_ip,'current','val_VFD_OutputCurrent','REAL',5,3600,plc_type='Micro800'), + PLCChannel(plc_ip,'frequency','val_VFD_ActualSpeed','REAL',5,3600,plc_type='Micro800'), + 
PLCChannel(plc_ip,'totalizer_1','Val_FlowMeterT1','REAL',5,3600,plc_type='Micro800'), + PLCChannel(plc_ip,'totalizer_2','Val_FlowMeterT2','REAL',5,3600,plc_type='Micro800'), + PLCChannel(plc_ip,'totalizer_3','Val_FlowMeterT3','REAL',5,3600,plc_type='Micro800'), + PLCChannel(plc_ip,'volume_flow_units','CMD_FlowMeterUnit','BOOL',5,3600,map_={0: "GPM", 1: "BPD"},plc_type='Micro800') + ] +else: + if drive_enabled: + tags = [ + ModbusChannel('volume_flow', 3873, 'FLOAT', 1,600,channel_size=2, unit_number=flowmeter_unit_number), + ModbusChannel('totalizer_1', 2609, 'FLOAT', 10,600,channel_size=2, unit_number=flowmeter_unit_number), + ModbusChannel('totalizer_2', 2809, 'FLOAT', 10,600,channel_size=2, unit_number=flowmeter_unit_number), + ModbusChannel('totalizer_3', 3009, 'FLOAT', 10,600,channel_size=2, unit_number=flowmeter_unit_number), + ModbusChannel('volume_flow_units', 2102, 'INTEGER', 1,3600,channel_size=1, unit_number=flowmeter_unit_number, transform_fn=volume_units), + ModbusChannel('totalizer_1_units', 4603, 'INTEGER', 1,3600,channel_size=1, unit_number=flowmeter_unit_number, transform_fn=totalizer_units), + ModbusChannel('totalizer_2_units', 4604, 'INTEGER', 1,3600,channel_size=1, unit_number=flowmeter_unit_number, transform_fn=totalizer_units), + ModbusChannel('totalizer_3_units', 4605, 'INTEGER', 1,3600,channel_size=1, unit_number=flowmeter_unit_number, transform_fn=totalizer_units), + ModbusChannel('run_status', 772, 'STRING', 0, 3600, channel_size=1, unit_number=drive_unit_number, transform_fn=status_codes), + ModbusChannel('frequency', 784, 'INTEGER', 0.5, 600, channel_size=2, unit_number=drive_unit_number,scaling=2 ), + ModbusChannel('current', 783, 'INTEGER', 0.5, 600, channel_size=2, unit_number=drive_unit_number,scaling=1 ), + ModbusChannel('fault_a', 815, 'STRING', 1, 3600, channel_size=1, unit_number=drive_unit_number,transform_fn=fault_code_a), + ModbusChannel('fault_b', 816, 'STRING', 1, 3600, channel_size=1, 
unit_number=drive_unit_number,transform_fn=fault_code_b), + ModbusChannel('pid_ref', 791, 'INTEGER', 1, 600, channel_size=1, unit_number=drive_unit_number,scaling=1), + ModbusChannel('pid_feedback', 792, 'INTEGER', 1, 600, channel_size=1, unit_number=drive_unit_number,scaling=1), + ModbusChannel('motor_rated_current', 4896, 'INTEGER', 0, 3600, channel_size=1, unit_number=drive_unit_number,scaling=1), + ModbusChannel('sleep_delay', 4924, 'INTEGER', 1, 600, channel_size=1, unit_number=drive_unit_number, scaling=1) + ] + else: + tags = [ + ModbusChannel('volume_flow', 3873, 'FLOAT', 1,600,channel_size=2, unit_number=flowmeter_unit_number), + ModbusChannel('totalizer_1', 2609, 'FLOAT', 10,600,channel_size=2, unit_number=flowmeter_unit_number), + ModbusChannel('totalizer_2', 2809, 'FLOAT', 10,600,channel_size=2, unit_number=flowmeter_unit_number), + ModbusChannel('totalizer_3', 3009, 'FLOAT', 10,600,channel_size=2, unit_number=flowmeter_unit_number), + ModbusChannel('volume_flow_units', 2102, 'INTEGER', 1,3600,channel_size=1, unit_number=flowmeter_unit_number, transform_fn=volume_units), + ModbusChannel('totalizer_1_units', 4603, 'INTEGER', 1,3600,channel_size=1, unit_number=flowmeter_unit_number, transform_fn=totalizer_units), + ModbusChannel('totalizer_2_units', 4604, 'INTEGER', 1,3600,channel_size=1, unit_number=flowmeter_unit_number, transform_fn=totalizer_units), + ModbusChannel('totalizer_3_units', 4605, 'INTEGER', 1,3600,channel_size=1, unit_number=flowmeter_unit_number, transform_fn=totalizer_units) + ] + + + + + + + + + \ No newline at end of file diff --git a/piflow/VFD525/config.txt b/piflow/VFD525/config.txt new file mode 100644 index 0000000..ade265f --- /dev/null +++ b/piflow/VFD525/config.txt @@ -0,0 +1,16 @@ +{ + +"driverFileName":"PiFlow.py", +"deviceName":"piflow", +"driverId":"0280", +"releaseVersion":"21", +"files": { + "file1":"PiFlow.py", + "file2":"Channel.py", + "file3":"file_logger.py", + "file4":"Tags.py", + "file5":"utilities.py", + 
"file6":"persistence.py" + } + +} \ No newline at end of file diff --git a/piflow/VFD525/file_logger.py b/piflow/VFD525/file_logger.py new file mode 100644 index 0000000..fd8c432 --- /dev/null +++ b/piflow/VFD525/file_logger.py @@ -0,0 +1,18 @@ +"""Logging setup for PiFlow""" +import logging +from logging.handlers import RotatingFileHandler +import sys + +log_formatter = logging.Formatter('%(asctime)s %(levelname)s %(funcName)s(%(lineno)d) %(message)s') +log_file = './PiFlow.log' +my_handler = RotatingFileHandler(log_file, mode='a', maxBytes=500*1024, + backupCount=2, encoding=None, delay=0) +my_handler.setFormatter(log_formatter) +my_handler.setLevel(logging.INFO) +filelogger = logging.getLogger('PiFlow') +filelogger.setLevel(logging.INFO) +filelogger.addHandler(my_handler) + +console_out = logging.StreamHandler(sys.stdout) +console_out.setFormatter(log_formatter) +filelogger.addHandler(console_out) diff --git a/piflow/VFD525/persistence.py b/piflow/VFD525/persistence.py new file mode 100644 index 0000000..8c8703f --- /dev/null +++ b/piflow/VFD525/persistence.py @@ -0,0 +1,21 @@ +"""Data persistance functions.""" +# if more advanced persistence is needed, use a sqlite database +import json + + +def load(filename="persist.json"): + """Load persisted settings from the specified file.""" + try: + with open(filename, 'r') as persist_file: + return json.load(persist_file) + except Exception: + return False + + +def store(persist_obj, filename="persist.json"): + """Store the persisting settings into the specified file.""" + try: + with open(filename, 'w') as persist_file: + return json.dump(persist_obj, persist_file, indent=4) + except Exception: + return False diff --git a/piflow/VFD525/utilities.py b/piflow/VFD525/utilities.py new file mode 100644 index 0000000..f9b2081 --- /dev/null +++ b/piflow/VFD525/utilities.py @@ -0,0 +1,71 @@ +"""Utility functions for the driver.""" +import socket +import struct +import urllib +import contextlib +def get_private_ip_address(): + 
"""Find the private IP Address of the host device.""" + try: + sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) + sock.connect(("8.8.8.8", 80)) + ip_address = sock.getsockname()[0] + sock.close() + except Exception as e: + return e + + return ip_address + +def get_public_ip_address(): + ip_address = "0.0.0.0" + try: + with contextlib.closing(urllib.urlopen("http://checkip.amazonaws.com")) as url: + ip_address = url.read() + except Exception as e: + print("Could not resolve address: {}".format(e)) + return ip_address + return ip_address[:-1] + + +def int_to_float16(int_to_convert): + """Convert integer into float16 representation.""" + bin_rep = ('0' * 16 + '{0:b}'.format(int_to_convert))[-16:] + sign = 1.0 + if int(bin_rep[0]) == 1: + sign = -1.0 + exponent = float(int(bin_rep[1:6], 2)) + fraction = float(int(bin_rep[6:17], 2)) + + if exponent == float(0b00000): + return sign * 2 ** -14 * fraction / (2.0 ** 10.0) + elif exponent == float(0b11111): + if fraction == 0: + return sign * float("inf") + return float("NaN") + frac_part = 1.0 + fraction / (2.0 ** 10.0) + return sign * (2 ** (exponent - 15)) * frac_part + + +def ints_to_float(int1, int2): + """Convert 2 registers into a floating point number.""" + mypack = struct.pack('>HH', int1, int2) + f_unpacked = struct.unpack('>f', mypack) + print("[{}, {}] >> {}".format(int1, int2, f_unpacked[0])) + return f_unpacked[0] + + +def degf_to_degc(temp_f): + """Convert deg F to deg C.""" + return (temp_f - 32.0) * (5.0/9.0) + + +def degc_to_degf(temp_c): + """Convert deg C to deg F.""" + return temp_c * 1.8 + 32.0 + +def get_public_ip_address(): + try: + url = urllib.urlopen("http://checkip.amazonaws.com") + ip_address = url.read() + except Exception as e: + return e + return ip_address diff --git a/plcpond/config.txt b/plcpond/config.txt index 532edc9..db2c020 100644 --- a/plcpond/config.txt +++ b/plcpond/config.txt @@ -2,11 +2,12 @@ "driverFileName": "plcpond.py", "deviceName": "plcpond", "driverId": "0220", - 
"releaseVersion": "3", + "releaseVersion": "4", "files": { "file1": "plcpond.py", "file2": "utilities.py", "file3": "Channel.py", - "file4": "file_logger.py" + "file4": "file_logger.py", + "file5": "persistence.py" } } \ No newline at end of file diff --git a/plcpond/persistence.py b/plcpond/persistence.py new file mode 100644 index 0000000..8c8703f --- /dev/null +++ b/plcpond/persistence.py @@ -0,0 +1,21 @@ +"""Data persistance functions.""" +# if more advanced persistence is needed, use a sqlite database +import json + + +def load(filename="persist.json"): + """Load persisted settings from the specified file.""" + try: + with open(filename, 'r') as persist_file: + return json.load(persist_file) + except Exception: + return False + + +def store(persist_obj, filename="persist.json"): + """Store the persisting settings into the specified file.""" + try: + with open(filename, 'w') as persist_file: + return json.dump(persist_obj, persist_file, indent=4) + except Exception: + return False diff --git a/plcpond/plcpond.py b/plcpond/plcpond.py index bac4621..8f0ef02 100644 --- a/plcpond/plcpond.py +++ b/plcpond/plcpond.py @@ -9,7 +9,7 @@ from random import randint from device_base import deviceBase from Channel import PLCChannel, read_tag, write_tag, TAG_DATAERROR_SLEEPTIME from utilities import get_public_ip_address - +import persistence _ = None @@ -34,22 +34,37 @@ logger.info("plcpond startup") # GLOBAL VARIABLES WATCHDOG_SEND_PERIOD = 3600 # Seconds, the longest amount of time before sending the watchdog status PLC_IP_ADDRESS = "192.168.1.12" + +PERSIST = persistence.load('persist.json') +if not PERSIST: + PERSIST = { + 'flowmeter_enable': False + } + persistence.store(PERSIST, 'persist.json') + CHANNELS = [ PLCChannel(PLC_IP_ADDRESS, "cfgnumberofponds", "cfgNumberOfPonds", "REAL", 0.5, 600, map_=False, write_enabled=False, plc_type='Micro800'), - PLCChannel(PLC_IP_ADDRESS, "pond1height", "pond1Height", "REAL", 1.0, 600, map_=False, write_enabled=False, 
plc_type='Micro800'), PLCChannel(PLC_IP_ADDRESS, "pond2height", "pond2Height", "REAL", 1.0, 600, map_=False, write_enabled=False, plc_type='Micro800'), PLCChannel(PLC_IP_ADDRESS, "pond3height", "pond3Height", "REAL", 1.0, 600, map_=False, write_enabled=False, plc_type='Micro800'), PLCChannel(PLC_IP_ADDRESS, "pond4height", "pond4Height", "REAL", 1.0, 600, map_=False, write_enabled=False, plc_type='Micro800'), - - PLCChannel(PLC_IP_ADDRESS, "pond1volume", "pond1Volume", "REAL", 500.0, 600, map_=False, write_enabled=False, plc_type='Micro800'), - PLCChannel(PLC_IP_ADDRESS, "pond2volume", "pond2Volume", "REAL", 500.0, 600, map_=False, write_enabled=False, plc_type='Micro800'), - PLCChannel(PLC_IP_ADDRESS, "pond3volume", "pond3Volume", "REAL", 500.0, 600, map_=False, write_enabled=False, plc_type='Micro800'), - PLCChannel(PLC_IP_ADDRESS, "pond4volume", "pond4Volume", "REAL", 500.0, 600, map_=False, write_enabled=False, plc_type='Micro800'), - - PLCChannel(PLC_IP_ADDRESS, "pondvolumetotal", "pondVolumeTotal", "REAL", 1000.0, 600, map_=False, write_enabled=False, plc_type='Micro800') + PLCChannel(PLC_IP_ADDRESS, "pond1volume", "pond1Volume", "REAL", 10000.0, 600, map_=False, write_enabled=False, plc_type='Micro800'), + PLCChannel(PLC_IP_ADDRESS, "pond2volume", "pond2Volume", "REAL", 10000.0, 600, map_=False, write_enabled=False, plc_type='Micro800'), + PLCChannel(PLC_IP_ADDRESS, "pond3volume", "pond3Volume", "REAL", 10000.0, 600, map_=False, write_enabled=False, plc_type='Micro800'), + PLCChannel(PLC_IP_ADDRESS, "pond4volume", "pond4Volume", "REAL", 10000.0, 600, map_=False, write_enabled=False, plc_type='Micro800'), + PLCChannel(PLC_IP_ADDRESS, "pondvolumetotal", "pondVolumeTotal", "REAL", 200000.0, 600, map_=False, write_enabled=False, plc_type='Micro800') ] +if PERSIST['flowmeter_enable']: + CHANNELS.append(PLCChannel(PLC_IP_ADDRESS, 'volume_flow', 'Val_FlowMeterFR', 'REAL', 500, 3600, plc_type='Micro800')) + CHANNELS.append(PLCChannel(PLC_IP_ADDRESS, 'totalizer_1', 
'Val_FlowMeterT1', 'REAL', 1000, 3600, plc_type='Micro800')) + CHANNELS.append(PLCChannel(PLC_IP_ADDRESS, 'totalizer_2', 'Val_FlowMeterT2', 'REAL', 1000, 3600, plc_type='Micro800')) + CHANNELS.append(PLCChannel(PLC_IP_ADDRESS, 'totalizer_3', 'Val_FlowMeterT3', 'REAL', 1000, 3600, plc_type='Micro800')) + CHANNELS.append(PLCChannel(PLC_IP_ADDRESS, 'today_flow', 'Val_FlowMeterToday', 'REAL', 1000, 3600, plc_type='Micro800')) + CHANNELS.append(PLCChannel(PLC_IP_ADDRESS, 'yesterday_flow', 'Val_FlowMeterYesterday', 'REAL', 1000, 3600, plc_type='Micro800')) + CHANNELS.append(PLCChannel(PLC_IP_ADDRESS, 'current_month_flow', 'Val_FlowMeterMonth', 'REAL', 1000, 3600, plc_type='Micro800')) + CHANNELS.append(PLCChannel(PLC_IP_ADDRESS, 'prev_month_flow', 'Val_FlowMeterLastMonth', 'REAL', 1000, 3600, plc_type='Micro800')) + CALIBRATION_TABLES = [[],[], [], [], []] # position 0 is a dummy table @@ -64,7 +79,7 @@ class start(threading.Thread, deviceBase): deviceBase.__init__(self, name=name, number=number, mac=mac, Q=Q, mcu=mcu, companyId=companyId, offset=offset, mqtt=mqtt, Nodes=Nodes) self.daemon = True - self.version = "3" + self.version = "4" self.finished = threading.Event() self.force_send = False threading.Thread.start(self)