diff --git a/.gitignore b/.gitignore index 759efd6..08ddf29 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,4 @@ *.pyc */bower_components/* -*/node_modules/* \ No newline at end of file +*/node_modules/* +*.log diff --git a/init/loggers b/init/tagserver similarity index 64% rename from init/loggers rename to init/tagserver index 58c4809..a9de9b3 100755 --- a/init/loggers +++ b/init/tagserver @@ -1,8 +1,8 @@ #! /bin/sh -# /etc/init.d/loggers +# /etc/init.d/tagserver ### BEGIN INIT INFO -# Provides: loggers +# Provides: tagserver # Required-Start: $remote_fs $syslog # Required-Stop: $remote_fs $syslog # Default-Start: 2 3 4 5 @@ -17,22 +17,19 @@ case "$1" in start) echo "Starting loggers" - kill -9 $(cat /root/solar_ww.pid) + kill -9 $(cat /root/tagserver.pid) # run application you want to start - #python /home/poconsole/src/dataLogger/alarmLogger.py & - #python /home/poconsole/src/dataLogger/dataLogger.py & - /usr/bin/python /home/poconsole/tagserver/python/tagserver_SQLite.py > /dev/null 2>&1 & echo $! > "/root/tagserver.pid" + /usr/bin/python /root/tagserver/python/tagserver_SQLite.py > /dev/null 2>&1 & echo $! > "/root/tagserver.pid" ;; stop) echo "Stopping loggers" # kill application you want to stop - #killall python kill -9 $(cat /root/tagserver.pid) ;; *) - echo "Usage: /etc/init.d/loggers {start|stop}" + echo "Usage: /etc/init.d/tagserver {start|stop}" exit 1 ;; esac diff --git a/python/mysql_cfg.pickle b/python/mysql_cfg.pickle new file mode 100644 index 0000000..fdde65b --- /dev/null +++ b/python/mysql_cfg.pickle @@ -0,0 +1,18 @@ +(dp0 +S'host' +p1 +S'127.0.0.1' +p2 +sS'password' +p3 +S'henrypump' +p4 +sS'user' +p5 +S'website' +p6 +sS'database' +p7 +S'poconsole' +p8 +s. \ No newline at end of file diff --git a/python/pickle_mysql_config.py b/python/pickle_mysql_config.py new file mode 100644 index 0000000..1948d0a --- /dev/null +++ b/python/pickle_mysql_config.py @@ -0,0 +1,11 @@ +import pickle + +mysql_cfg = { + 'host':'127.0.0.1', + 'user':'website', + 'password':'henrypump', + 'database':'poconsole' +} + +with open('mysql_cfg.pickle', 'wb') as pickleconfig: + pickle.dump(mysql_cfg, pickleconfig) diff --git a/python/pycomm.log b/python/pycomm.log index d83c3a7..7363b30 100644 --- a/python/pycomm.log +++ b/python/pycomm.log @@ -1 +1 @@ -pycomm.ab_comm.clx WARNING 2016-01-25 14:45:25,488 (5, 'forward_close returned False') +pycomm.cip.cip_base WARNING 2016-04-07 16:14:59,861 (5, 'forward_close returned False') diff --git a/python/pycomm_micro/__init__.pyc b/python/pycomm_micro/__init__.pyc index a2df465..bc5fab5 100644 Binary files a/python/pycomm_micro/__init__.pyc and b/python/pycomm_micro/__init__.pyc differ diff --git a/python/pycomm_micro/ab_comm/__init__.pyc b/python/pycomm_micro/ab_comm/__init__.pyc index fc534df..d6cb62c 100644 Binary files a/python/pycomm_micro/ab_comm/__init__.pyc and b/python/pycomm_micro/ab_comm/__init__.pyc differ diff --git a/python/pycomm_micro/ab_comm/clx.pyc b/python/pycomm_micro/ab_comm/clx.pyc index 38ad70b..53c8acf 100644 Binary files a/python/pycomm_micro/ab_comm/clx.pyc and b/python/pycomm_micro/ab_comm/clx.pyc differ diff --git a/python/pycomm_micro/cip/__init__.pyc b/python/pycomm_micro/cip/__init__.pyc index cf7519b..ba3f8dd 100644 Binary files a/python/pycomm_micro/cip/__init__.pyc and b/python/pycomm_micro/cip/__init__.pyc differ diff --git a/python/pycomm_micro/cip/cip_base.pyc b/python/pycomm_micro/cip/cip_base.pyc index 81635c4..1ac6ff7 100644 Binary files a/python/pycomm_micro/cip/cip_base.pyc and 
b/python/pycomm_micro/cip/cip_base.pyc differ diff --git a/python/pycomm_micro/cip/cip_const.pyc b/python/pycomm_micro/cip/cip_const.pyc index 130af93..ced24b9 100644 Binary files a/python/pycomm_micro/cip/cip_const.pyc and b/python/pycomm_micro/cip/cip_const.pyc differ diff --git a/python/pycomm_micro/common.pyc b/python/pycomm_micro/common.pyc index 7276546..26cab34 100644 Binary files a/python/pycomm_micro/common.pyc and b/python/pycomm_micro/common.pyc differ diff --git a/python/tag_mysql.py b/python/tag_mysql.py new file mode 100644 index 0000000..ed468d2 --- /dev/null +++ b/python/tag_mysql.py @@ -0,0 +1,88 @@ +#! /usr/bin/python + +from datetime import datetime +import time +import mysql.connector as mysqlcon +from pycomm.ab_comm.clx import Driver as ClxDriver +import micro800 as u800 +import traceback +import pickle + +with open('/root/tagserver/python/mysql_cfg.pickle', 'rb') as cfgFile: + mysql_cfg = pickle.load(cfgFile) + con = mysqlcon.connect(**mysql_cfg) + + + +def readTag(addr, tag): + time.sleep(0.01) + c = ClxDriver() + if c.open(addr): + try: + v = c.read_tag(tag) + # print(v) + return v + except Exception: + print("ERROR RETRIEVING TAG: {}".format(tag)) + err = c.get_status() + c.close() + print traceback.print_exc() + pass + c.close() + +class Tag(): + global readTag, con, PLC_IP_ADDRESS + + def __init__(self, name, tag, db_id, data_type, change_threshold, guarantee_sec, mapFn=None, plc_type='CLX', plc_ip_address='192.168.1.10'): + self.name = str(name) + self.tag = str(tag) + self.data_type = str(data_type) + self.value = None + self.last_value = None + self.guarantee_sec = guarantee_sec + self.chg_threshold = change_threshold + self.last_send_time = 0 + self.mapFn = mapFn + self.plc_type = plc_type + self.readFn = readTag + self.db_id = db_id + if self.plc_type == "u800": + self.readFn = u800.readTag + self.plc_ip_address = plc_ip_address + + def read(self, forceSend): + writeToDB = False + if self.tag: + v = self.readFn(self.plc_ip_address, self.tag) + if v: + if self.data_type == 'BOOL' or self.data_type == 'STRING': + val = v[0] + if self.mapFn: + val = self.mapFn[val] + if (self.last_send_time == 0) or (self.value is None) or not (self.value == val) or ((time.time() - self.last_send_time) > self.guarantee_sec) or (forceSend): + self.last_value = self.value + self.value = val + writeToDB = True + else: + writeToDB = False + else: + if (self.last_send_time == 0) or (self.value is None) or (abs(self.value - v[0]) > self.chg_threshold) or ((time.time() - self.last_send_time) > self.guarantee_sec) or (forceSend): + self.last_value = self.value + self.value = v[0] + writeToDB = True + else: + writeToDB = False + if writeToDB: + self.sendToDB() + + def sendToDB(self): + # TODO: Datetime + query = "INSERT INTO poconsole.tag_vals (dtime, tagID, val) VALUES ('{}', '{}', {})".format(time.strftime('%Y-%m-%d %H:%M:%S'), self.db_id, self.value) + self.last_send_time = time.time() + print query + # TODO: CHECK ON THIS LOGIC -- with con: + con.connect() + cur = con.cursor() + cur.execute(query) + con.commit() + cur.close() diff --git a/python/tag_sqlite.py b/python/tag_sqlite.py new file mode 100644 index 0000000..74eb046 --- /dev/null +++ b/python/tag_sqlite.py @@ -0,0 +1,82 @@ +#! 
/usr/bin/python +from datetime import datetime +import time +import sqlite3 as lite +from pycomm.ab_comm.clx import Driver as ClxDriver +import micro800 as u800 +import traceback +import pickle + +con = lite.connect("/mnt/usb/data.db") +# con = lite.connect("/Users/patrickjmcd/Desktop/data.db") + + +def readTag(addr, tag): + time.sleep(0.01) + c = ClxDriver() + if c.open(addr): + try: + v = c.read_tag(tag) + return v + except Exception: + print("ERROR RETRIEVING TAG: {} at {}".format(tag, addr)) + err = c.get_status() + c.close() + print err + pass + c.close() + +class Tag(): + global readTag, con + + def __init__(self, name, tag, db_id, data_type, change_threshold, guarantee_sec, mapFn=None, plc_type='CLK', plc_ip_address='192.168.1.10'): + self.name = name + self.tag = tag + self.data_type = data_type + self.value = None + self.last_value = None + self.guarantee_sec = guarantee_sec + self.chg_threshold = change_threshold + self.last_send_time = 0 + self.mapFn = mapFn + self.plc_type = plc_type + self.readFn = readTag + self.db_id = db_id + if self.plc_type == "u800": + self.readFn = u800.readTag + self.plc_ip_address = plc_ip_address + + def read(self, forceSend): + writeToDB = False + if self.tag: + v = self.readFn(str(self.plc_ip_address), str(self.tag)) + if v: + if self.data_type == 'BOOL' or self.data_type == 'STRING': + val = v[0] + if self.mapFn: + val = self.mapFn[val] + if (self.last_send_time == 0) or (self.value is None) or not (self.value == val) or ((time.time() - self.last_send_time) > self.guarantee_sec) or (forceSend): + self.last_value = self.value + self.value = val + writeToDB = True + else: + writeToDB = False + else: + if (self.last_send_time == 0) or (self.value is None) or (abs(self.value - v[0]) > self.chg_threshold) or ((time.time() - self.last_send_time) > self.guarantee_sec) or (forceSend): + self.last_value = self.value + self.value = v[0] + writeToDB = True + else: + writeToDB = False + if writeToDB: + self.sendToDB() + + def sendToDB(self): + query = "INSERT INTO tag_vals (dtime, tagID, val) VALUES ({}, '{}', {})".format(time.time(), self.db_id, self.value) + self.last_send_time = time.time() + print query + with con: + cur = con.cursor() + cur.execute(query) + con.commit() + cur.close() diff --git a/python/tagserver_MySQL.py b/python/tagserver_MySQL.py index 48b7a18..f85a9b5 100644 --- a/python/tagserver_MySQL.py +++ b/python/tagserver_MySQL.py @@ -1,70 +1,94 @@ #!/usr/bin/env python ''' -Created on Dec 8, 2015 - +MySQL Tag Server +Created on April 7, 2016 @author: Patrick McDonagh +@description: Continuously loops through a list of tags to store values from a PLC into a MySQL database ''' - - -from datetime import datetime -import sys -from random import randint +import mysql.connector as mysqlcon +import pickle +from tag_mysql import Tag +import traceback import time -import MySQLdb -import tuxeip - - -#TUXEIP Connection to PLC -from tuxeip import TuxEIP, LGX, LGX_REAL +with open('/root/tagserver/python/mysql_cfg.pickle', 'rb') as pickleconfig: + mysql_cfg = pickle.load(pickleconfig) +if mysql_cfg: + db = mysqlcon.connect(**mysql_cfg) +tag_store = {} +configProperties = {} def main(): - - db = MySQLdb.connect(host="127.0.0.1",user="website",passwd="henrypump",db="TagData") + db.connect() cur = db.cursor() - query = "SELECT * FROM TagData.tags WHERE deleted = 0;" + query = "SELECT * FROM tags WHERE class = 5 AND deleted = 0" cur.execute(query) tags = cur.fetchall() - # ((1L, 'DC_Bus_Voltage', datetime.datetime(2015, 12, 8, 16, 2, 32), 'V', 0L), (2L, 
'Output_Frequency', datetime.datetime(2015, 12, 8, 16, 31, 12), 'Hz', 0L)) - db.commit() - db.close() + print tags + # [(1, u'Century Counter Up', 5, u'Century_Counter_Up', u'REAL', 10.0, 3600, None, 0)] + db.disconnect() - PLC_IP_ADDRESS = "10.10.10.3" # MAKE THIS A db VALUE - scan_rate = 10 - tagList = []; - if len(tags) > 0: - for t in tags: - tagList.append({"id":int(t[0]), "name":t[1], "val":None, "lastVal":None}); + configObj = {} + db.connect() + cur = db.cursor() + query = "SELECT parameter, val FROM config GROUP BY parameter;" + cur.execute(query) + config = cur.fetchall() + db.disconnect() + for x in config: + configObj[x[0]] = x[1] try: - tux = TuxEIP(libpath="/usr/lib/libtuxeip.so") - sess = tux.OpenSession(PLC_IP_ADDRESS) - reg = tux.RegisterSession(sess) - conn = tux.ConnectPLCOverCNET(sess, LGX, 1, 100, 123, randint(0,9999), 123, 321, 100, 5000, 1, '01') + configProperties['PLC_IP_ADDRESS'] = str(configObj['ip_address']) + print("FYI, using PLC IP Address from the database {0}".format(configProperties['PLC_IP_ADDRESS'])) + except KeyError: + print("FYI, there is no PLC IP Address stored in the database, defaulting to 192.168.1.10") + configProperties['PLC_IP_ADDRESS'] = "192.168.1.10" - while True: - for r in tagList: - r["val"] = tux.ReadLGXDataAsFloat(sess, conn, r['name'], 1)[0] - print("{0} - {1}".format(r["name"], r["val"])) - if not r["val"] == r["lastVal"]: - db = MySQLdb.connect(host="127.0.0.1",user="website",passwd="henrypump",db="TagData") - cur = db.cursor() - aQuery = """INSERT INTO TagData.values (tagID, val) VALUES ('%d', '%f');"""%(r["id"], float(r["val"])) - print(aQuery) - storeVal = cur.execute(aQuery) - db.commit() - db.close() - r["lastVal"] = r["val"] + try: + configProperties['plc_type'] = str(configObj['plc_type']) + print("FYI, using PLC Type from the database {0}".format(configProperties['plc_type'])) + except KeyError: + print("FYI, there is no PLC Type stored in the database, defaulting to CLX") + configProperties['plc_type'] = "CLX" + + try: + configProperties['scan_rate'] = int(configObj['scan_rate']) + print("FYI, using Scan Rate from the database {0}".format(configProperties['scan_rate'])) + except KeyError: + print("FYI, there is no Scan Rate stored in the database, defaulting to 10 seconds") + configProperties['scan_rate'] = 10 + + try: + sa_test = str(configObj['save_all']) + if sa_test == "true": + configProperties['save_all'] = True + else: + configProperties['save_all'] = False + print("FYI, value for save_all is {0}".format(configProperties['save_all'])) + except KeyError: + print("FYI, there is no save_all value stored in the database, using False") + configProperties['save_all'] = False + + + + + for t in tags: + tag_store[t[1]] = Tag(t[1], t[3], t[0], t[5], t[6], t[7], mapFn=t[8], plc_type=configProperties['plc_type'], plc_ip_address=configProperties['PLC_IP_ADDRESS']) + + + while True: + for tag in tag_store: + try: + tag_store[tag].read(configProperties['save_all']) + except: + print("ERROR EVALUATING {}".format(tag)) + traceback.print_exc() + time.sleep(configProperties['scan_rate']) - time.sleep(10) - except Exception as err: - print err - pass - - if __name__ == '__main__': main() diff --git a/python/tagserver_SQLite.py b/python/tagserver_SQLite.py index 451db48..20a3628 100644 --- a/python/tagserver_SQLite.py +++ b/python/tagserver_SQLite.py @@ -8,125 +8,87 @@ Created on Dec 8, 2015 import time import sqlite3 as lite -from pycomm.ab_comm.clx import Driver as ClxDriver -import micro800 as u800 -import logging +from tag_sqlite 
import Tag +import traceback # con = lite.connect("/usr/db/data.db") con = lite.connect('/mnt/usb/data.db') configProperties = {} -def readTag(addr, tag): - logging.basicConfig( - filename="ClxDriver.log", - format="%(levelname)-10s %(asctime)s %(message)s", - level=logging.DEBUG - ) - c = ClxDriver() - - if c.open(addr): - try: - v = c.read_tag(tag) - # print(v) - return v - except Exception as e: - err = c.get_status() - c.close() - print err - print e - pass - c.close() def main(): - with con: - cur = con.cursor() - query = "SELECT * FROM tags WHERE deleted = 0;" - cur.execute(query) - tags = cur.fetchall() + with con: + cur = con.cursor() + query = "SELECT * FROM tags WHERE deleted = 0;" + cur.execute(query) + tags = cur.fetchall() - configObj = {} + configObj = {} - with con: - cur = con.cursor() - query = "SELECT parameter, val FROM config GROUP BY parameter;" - cur.execute(query) - config = cur.fetchall() - for x in config: - configObj[x[0]] = x[1] + with con: + cur = con.cursor() + query = "SELECT parameter, val FROM config GROUP BY parameter;" + cur.execute(query) + config = cur.fetchall() + for x in config: + configObj[x[0]] = x[1] - try: - configProperties['PLC_IP_ADDRESS'] = str(configObj['ip_address']) - print("FYI, using PLC IP Address from the database {0}".format(configProperties['PLC_IP_ADDRESS'])) - except KeyError: - print("FYI, there is no PLC IP Address stored in the database, defaulting to 192.168.1.10") - configProperties['PLC_IP_ADDRESS'] = "192.168.1.10" + try: + configProperties['PLC_IP_ADDRESS'] = str(configObj['ip_address']) + print("FYI, using PLC IP Address from the database {0}".format(configProperties['PLC_IP_ADDRESS'])) + except KeyError: + print("FYI, there is no PLC IP Address stored in the database, defaulting to 192.168.1.10") + configProperties['PLC_IP_ADDRESS'] = "192.168.1.10" - try: - configProperties['plc_type'] = str(configObj['plc_type']) - print("FYI, using PLC Type from the database {0}".format(configProperties['plc_type'])) - except KeyError: - print("FYI, there is no PLC Type stored in the database, defaulting to CLX") - configProperties['plc_type'] = "CLX" + try: + configProperties['plc_type'] = str(configObj['plc_type']) + print("FYI, using PLC Type from the database {0}".format(configProperties['plc_type'])) + except KeyError: + print("FYI, there is no PLC Type stored in the database, defaulting to CLX") + configProperties['plc_type'] = "CLX" - try: - configProperties['scan_rate'] = int(configObj['scan_rate']) - print("FYI, using Scan Rate from the database {0}".format(configProperties['scan_rate'])) - except KeyError: - print("FYI, there is no Scan Rate stored in the database, defaulting to 10 seconds") - configProperties['scan_rate'] = 10 + try: + configProperties['scan_rate'] = int(configObj['scan_rate']) + print("FYI, using Scan Rate from the database {0}".format(configProperties['scan_rate'])) + except KeyError: + print("FYI, there is no Scan Rate stored in the database, defaulting to 10 seconds") + configProperties['scan_rate'] = 10 - try: - sa_test = str(configObj['save_all']) - if sa_test == "true": - configProperties['save_all'] = True - else: - configProperties['save_all'] = False - print("FYI, value for save_all is {0}".format(configProperties['save_all'])) - except KeyError: - print("FYI, there is no save_all value stored in the database, using False") - configProperties['save_all'] = False + try: + sa_test = str(configObj['save_all']) + if sa_test == "true": + configProperties['save_all'] = True + else: + 
configProperties['save_all'] = False + print("FYI, value for save_all is {0}".format(configProperties['save_all'])) + except KeyError: + print("FYI, there is no save_all value stored in the database, using False") + configProperties['save_all'] = False - tagList = [] - print("\nScan List\n--------------") - if len(tags) > 0: - for t in tags: - tagList.append({"id": int(t[0]), "name": t[1], "val": None, "lastVal": None}) - print(t[1]) - print("--------------\n") + tag_store = {} - while True: - try: - for r in tagList: - r['val'] = 0 - if configProperties['plc_type'] == "u800": - r["val"] = u800.readMicroTag(configProperties['PLC_IP_ADDRESS'], str(r['name']))[0] - else: - r["val"] = readTag(configProperties['PLC_IP_ADDRESS'], str(r['name']))[0] + if len(tags) > 0: + for t in tags: + # (1, u'Pump Intake Pressure', u'5', u'Pump_Intake_Pressure', u'Pressure at the Intake of the Pump', None, 100.0, 3600, u'PSI', 0.0, 3000.0, u'2016-04-13 21:27:01', 0) + tag_store[t[1]] = Tag(t[1], t[3], t[0], t[5], t[6], t[7], mapFn=t[8], plc_type=configProperties['plc_type'], plc_ip_address=configProperties['PLC_IP_ADDRESS']) - print("{0} - {1}".format(r["name"], r["val"])) - if (not configProperties['save_all'] and not r["val"] == r["lastVal"]) or configProperties['save_all']: - with con: - cur = con.cursor() - aQuery = """INSERT INTO vals (tagID, val) VALUES ('%d', '%f');""" % (r["id"], float(r["val"])) - # print(aQuery) - cur.execute(aQuery) - con.commit() - print("") - r["lastVal"] = r["val"] - print("-----------") - time.sleep(configProperties['scan_rate']) - except Exception as err: - print err - main() + while True: + for tag in tag_store: + try: + tag_store[tag].read(configProperties['save_all']) + except: + print("ERROR EVALUATING {}".format(tag)) + traceback.print_exc() + time.sleep(configProperties['scan_rate']) if __name__ == '__main__': - main() + main() diff --git a/python/tuxeip.py b/python/tuxeip.py deleted file mode 100644 index 1c39e40..0000000 --- a/python/tuxeip.py +++ /dev/null @@ -1,276 +0,0 @@ -#! 
/usr/bin/env python - -# Copyright (C) 2014 Gayner Technical Services Pty Ltd - -from ctypes import * - -# PLC TYPES -Unknow=0 -PLC=1 -SLC500=2 -LGX=3 - -# EIP DATA TYPES -PLC_BIT=1 -PLC_BIT_STRING=2 -PLC_BYTE_STRING=3 -PLC_INTEGER=4 -PLC_TIMER=5 -PLC_COUNTER=6 -PLC_CONTROL=7 -PLC_FLOATING=8 -PLC_ARRAY=9 -PLC_ADRESS=15 -PLC_BCD=16 - -# LOGIX DATA TYPES -LGX_BOOL=0xC1 -LGX_BITARRAY=0xD3 -LGX_SINT=0xC2 -LGX_INT=0xC3 -LGX_DINT=0xC4 -LGX_REAL=0xCA - -class Eip_Session(Structure): - _fields_ = [ - ('sock',c_int), - ('Session_Handle', c_uint), - ('Sender_ContextL',c_int), - ('Sender_ContextH',c_int), - ('timeout', c_int), - ('references', c_int), - ('Data', c_void_p), - ] - -class Eip_Connection(Structure): - _fields_ = [ - ('Eip_Session', Eip_Session), - ('references', c_int), - ('Data', c_void_p), - ('ConnectionSerialNumber', c_uint), - ('OriginatorVendorID', c_uint), - ('OriginatorSerialNumber', c_int), - ('OT_ConnID', c_int), - ('TO_ConnID', c_int), - ('packet', c_short), - ('Path_size', c_byte) - ] - -class Eip_PLC_Read(Structure): - _fields_ = [ - ('type', c_int), - ('Varcount', c_int), - ('totalise', c_int), - ('elementsize', c_int), - ('mask', c_uint), - ] - -class TuxEIPException(Exception): - def __init__(self, value): - self.value = value - - def __str__(self): - return repr(self.value) - -class TuxEIP: - - def __init__(self, **kwargs): - self.__libpath = kwargs.get("libpath", "libtuxeip.dylib") - self.__tuxeip = CDLL(self.__libpath) - self.__tuxeip._cip_err_msg.restype = c_char_p - - def __del__(self): - del self.__tuxeip - - def OpenSession(self, slaveip_, slaveport_=44818, slavetimeout_=1000): - self.__tuxeip._OpenSession.restype = POINTER(Eip_Session) - - # Convert params to C types - slaveip = c_char_p(slaveip_) - slaveport = c_int(slaveport_) - slavetimeout = c_int(slavetimeout_) - - session = self.__tuxeip._OpenSession(slaveip, slaveport, slavetimeout) - - #print self.__tuxeip._cip_err_msg, self.__tuxeip._cip_errno, self.__tuxeip._cip_ext_errno - - if bool(session) == False: - raise TuxEIPException("Could not open session to " + str(slaveip) + ":" + str(slaveport)) - - return session - - def RegisterSession(self, sess_): - self.__tuxeip._RegisterSession.restype = c_int - reg = self.__tuxeip._RegisterSession(sess_) - - if reg != False: - raise TuxEIPException("Could not register session") - - return reg - - def ConnectPLCOverCNET(self, sess_, plctype_, priority_, timeoutticks_, connid_, conserial_, - vendorid_, serialnum_, timeoutmult_, rpi_, transport_, slavepath_): - # Convert params to C types - priority = c_byte(priority_) - timeoutticks = c_byte(timeoutticks_) - connid = c_uint(connid_) - conserial = c_ushort(conserial_) - vendorid = c_ushort(vendorid_) - serialnum = c_uint(serialnum_) - timeutmult = c_byte(timeoutmult_) - rpi = c_uint(rpi_) - transport = c_byte(transport_) - slavepath = c_char_p(slavepath_) - pathlength = len(slavepath_) - - self.__tuxeip._ConnectPLCOverCNET.restype = POINTER(Eip_Connection) - - connection = self.__tuxeip._ConnectPLCOverCNET( - sess_, - plctype_, - priority, - timeoutticks, - connid, - conserial, - vendorid, - serialnum, - timeutmult, - rpi, - transport, - slavepath, - pathlength - ) - - if bool(connection) == False: - raise TuxEIPException("Could not connect to CPU") - - return connection - - def ReadLgxData(self, sess_, conn_, var_, num_): - self.__tuxeip._ReadLgxData.restype = POINTER(Eip_PLC_Read) - readdata = self.__tuxeip._ReadLgxData(sess_, conn_, var_, num_) - - if bool(readdata) == False: - raise TuxEIPException("Read data 
failed") - - return readdata - - def WriteLGXData(self, sess_, conn_, address_, datatype_, data_, num_ ): - if datatype_ == LGX_INT or datatype_ == LGX_BOOL or datatype_ == LGX_DINT or datatype_ == LGX_SINT: - data = c_int(data_) - elif datatype_ == LGX_REAL: - data = c_float(data_) - else: - raise TuxEIPException("Write data failed") - - data = self.__tuxeip._WriteLgxData(sess_, conn_, address_, datatype_, byref(data), num_) - - return data - - def ReadLGXDataAsFloat(self, sess_, conn_, var_, num_): - data = self.ReadLgxData(sess_, conn_, var_, num_) - d = self.GetLGXValueAsFloat(data) - self.FreePLCRead(data) - return d - - def ReadLGXDataAsInteger(self, sess_, conn_, var_, num_): - data = self.ReadLgxData(sess_, conn_, var_, num_) - d = self.GetLGXValueAsInteger(data) - self.FreePLCRead(data) - return d - - def ReadPLCDataAsFloat(self, sess_, conn_, dhp_, routepath_, routesize_, plctype_, tns_, address_, number_): - data = self.ReadPLCData(sess_, conn_, dhp_, routepath_, routesize_, plctype_, tns_, address_, number_) - d = self.PCCC_GetValueAsFloat(data) - self.FreePLCRead(data) - return d - - def ReadPLCDataAsInteger(self, sess_, conn_, dhp_, routepath_, routesize_, plctype_, tns_, address_, number_): - data = self.ReadPLCData(sess_, conn_, dhp_, routepath_, routesize_, plctype_, tns_, address_, number_) - d = self.PCCC_GetValueAsInteger(data) - self.FreePLCRead(data) - return d - - def ReadPLCData(self, sess_, conn_, dhp_, routepath_, routesize_, plctype_, tns_, address_, number_): - self.__tuxeip._ReadPLCData.restype = POINTER(Eip_PLC_Read) - readdata = self.__tuxeip._ReadPLCData(sess_, conn_, dhp_, routepath_, routesize_, plctype_, - tns_, address_, number_) - - if bool(readdata) == False: - raise TuxEIPException("Read data failed") - - return readdata - - def GetLGXValueAsFloat(self, readdata_): - if bool(readdata_) == False: - return None - - self.__tuxeip._GetLGXValueAsFloat.restype = c_float - values = [] - for i in range(0, readdata_.contents.Varcount): - v = self.__tuxeip._GetLGXValueAsFloat(readdata_, i) - values.append(v) - - return values - - def GetLGXValueAsInteger(self, readdata_): - if bool(readdata_) == False: - return None - - self.__tuxeip._GetLGXValueAsInteger.restype = c_int - values = [] - for i in range(0, readdata_.contents.Varcount): - v = self.__tuxeip._GetLGXValueAsInteger(readdata_, i) - values.append(v) - - return values - - def PCCC_GetValueAsFloat(self, readdata_): - if bool(readdata_) == False: - return None - - self.__tuxeip._PCCC_GetValueAsFloat.restype = c_float - values = [] - for i in range(0, readdata_.contents.Varcount): - v = self.__tuxeip._PCCC_GetValueAsFloat(readdata_, i) - values.append(v) - - return values - - def PCCC_GetValueAsInteger(self, readdata_): - if bool(readdata_) == False: - return None - - self.__tuxeip._PCCC_GetValueAsInteger.restype = c_int - values = [] - for i in range(0, readdata_.contents.Varcount): - v = self.__tuxeip._PCCC_GetValueAsInteger(readdata_, i) - values.append(v) - - return values - - def WritePLCData(self, sess_, conn_, dhp_, routepath_, routesize_, plctype_, tns_, address_, datatype_, data_, number_): - - if datatype_ == PLC_INTEGER: - data = c_int(data_) - elif datatype_ == PLC_FLOATING: - data = c_float(data_) - else: - raise TuxEIPException("Variable type not supported" + str(datatype_)) - - result = self.__tuxeip._WritePLCData(sess_, conn_, dhp_, routepath_, routesize_, plctype_, - tns_, address_, datatype_, byref(data), number_) - - return result - - def Forward_Close(self, conn_): - 
self.__tuxeip._Forward_Close(conn_) - - def UnRegisterSession(self, sess_): - self.__tuxeip._UnRegisterSession(sess_) - - def CloseSession(self, sess_): - self.__tuxeip.CloseSession(sess_) - - def FreePLCRead(self, data_): - self.__tuxeip._FreePLCRead(data_) diff --git a/python/tuxeip.pyc b/python/tuxeip.pyc deleted file mode 100644 index 92f8848..0000000 Binary files a/python/tuxeip.pyc and /dev/null differ diff --git a/www/app.coffee b/www/app.coffee new file mode 100644 index 0000000..8ad43f6 --- /dev/null +++ b/www/app.coffee @@ -0,0 +1,101 @@ +express = require('express') +path = require('path') +fs = require('fs') +logger = require('morgan') +methodOverride = require('method-override') +bodyParser = require('body-parser') +errorHandler = require('errorhandler') +app = express() +fns = undefined + +app.locals.DB_TYPE = 'MySQL' +# or "MySQL" + +###* +* Configuration +### + +if app.locals.DB_TYPE == 'MySQL' + fns = require('./functions_MySQL.coffee') + mysql = require('mysql') + db_config = + host: 'localhost' + user: 'website' + password: 'henrypump' + database: 'poconsole' + app.locals.pool = mysql.createPool(db_config) + + # handleDisconnect = -> + # console.log 'Handling db disconnect gracefully' + # app.locals.db = mysql.createConnection(db_config) + # app.locals.db.connect (err) -> + # if err + # console.log 'error when connecting to db:', err + # setTimeout handleDisconnect, 2000 + # return + # app.locals.db.on 'error', (err) -> + # console.log 'db error', err + # if err.code == 'PROTOCOL_CONNECTION_LOST' + # handleDisconnect() + # else + # throw err + # return + # return + # handleDisconnect() +else + fns = require('./functions_SQLite.coffee') + +app.set 'port', process.env.PORT or 80 +app.set 'views', path.join(__dirname, 'views') +app.engine '.html', require('ejs').renderFile +app.set 'view engine', 'html' +#app.use(favicon(__dirname + '/public/img/favicon.ico')); +app.use logger('dev') +app.use methodOverride() +app.use bodyParser.json() +app.use bodyParser.urlencoded(extended: true) +#app.use(express["static"](path.join(__dirname, 'public'))); +app.use express.static(__dirname + '/public') +app.use '/bower_components', express.static(__dirname + '/bower_components') +app.use '/node_modules', express.static(__dirname + '/node_modules') + +###* +* Routes +### + +angular = (req, res) -> + res.render 'angularIndex' + return + +app.post '/json/tag/add', fns.createTag # Adds a tag to the scan list +app.post '/json/tag/update/', fns.updateTag # Updates tag data +app.get '/json/tag/delete/:tag', fns.deleteTag # Removes a tag from the scan list +app.get '/json/tag/:id', fns.getTag # Gets a specific tag in the scan list +app.get '/json/tag', fns.getAllTags # Lists all tags in the scan list +app.get '/json/val/:tag', fns.latestValueSingleTag # Gets the latest value of a single tag +app.get '/json/series/:tag/:hours', fns.seriesTagValues # Gets all the values of a tag for the last X hours +app.get '/json/valBetween/:tag/:startDatetime/:endDatetime', fns.seriesTagValuesBetween # Gets the values of a tag between the start time and end time +app.get '/json/CSV/all', fns.allDataCSV # Gets a CSV of all values stored +app.get '/json/CSV/:tag/:startDatetime/:endDatetime', fns.seriesCSVBetween # Gets a CSV of the values of a tag between the start time and end time +app.get '/json/CSV/:tag/:hours', fns.seriesCSV # Gets a CSV of the values of a tag for the last x hours +app.get '/json/all', fns.latestValueAllTags # Gets the latest values of all tags in the scan list +app.get '/json/config', 
fns.getSetup # Gets the contents of the config table +app.post '/json/config', fns.updateSetup # Adds a new parameter to the config table +app.get '/json/logger/status', fns.checkLoggerStatus # Gets the status of the data logger +app.get '/json/logger/restart', fns.restartLogger # Restarts the data logger +app.get '/json/clearDatabase/all', fns.clearValues # Removes all tag values from the database +app.get '/json/clearDatabase/:id', fns.clearValues # Removes tag values from the database +app.get '*', angular + +###* +* Start Server +### + +connectionsArray = [] +s_port = 3000 +server = app.listen(s_port, -> + host = server.address().address + port = server.address().port + console.log 'POConsole listening at http://%s:%s', host, port + return +) diff --git a/www/dbcreate_MySQL.sql b/www/dbcreate_MySQL.sql index 4bf0cd6..9268937 100644 --- a/www/dbcreate_MySQL.sql +++ b/www/dbcreate_MySQL.sql @@ -1,18 +1,51 @@ -CREATE DATABASE TagData; -CREATE TABLE `TagData`.`tags` ( - `id` INT NOT NULL AUTO_INCREMENT, - `tagName` VARCHAR(128) NULL, - `dateAdded` timestamp NULL DEFAULT CURRENT_TIMESTAMP, - `units` VARCHAR(16) NULL, - `deleted` INT NULL DEFAULT 0, - PRIMARY KEY (`id`)); +CREATE DATABASE poconsole; +USE poconsole; +CREATE TABLE IF NOT EXISTS tag_classes( + id int(11) NOT NULL AUTO_INCREMENT, + tag_class varchar(64), + description varchar(64), + PRIMARY KEY (id) +); -CREATE TABLE `TagData`.`values` ( - `id` INT NOT NULL AUTO_INCREMENT, - `tagID` INT NULL, - `val` FLOAT NULL, - `dateAdded` timestamp NULL DEFAULT CURRENT_TIMESTAMP, - PRIMARY KEY (`id`)); +CREATE TABLE IF NOT EXISTS tags( + id int(11) NOT NULL AUTO_INCREMENT, + name varchar(128), + class int(11), + tag varchar(128), + description varchar(128), + data_type varchar(32), + change_threshold float, + guarantee_sec integer(11), + map_function varchar(64), + units varchar(64), + minExpected varchar(64), + maxExpected varchar(64),  + deleted INT NULL DEFAULT 0, + PRIMARY KEY (id) +); + + +CREATE TABLE IF NOT EXISTS tag_vals( + id int(11) NOT NULL AUTO_INCREMENT, + dtime datetime, + tagID int, + val float, + PRIMARY KEY (id) +); + +CREATE TABLE IF NOT EXISTS config ( + id INT NOT NULL AUTO_INCREMENT, + parameter varchar(128), + val varchar(128), + dateAdded TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY (id) +); + +INSERT INTO poconsole.tag_classes (id, tag_class, description) VALUES (1, 'stroke', 'Stroke Information'); +INSERT INTO poconsole.tag_classes (id, tag_class, description) VALUES (2, 'history', 'Historical Data'); +INSERT INTO poconsole.tag_classes (id, tag_class, description) VALUES (3, 'gaugeoff', 'Gauge Off Data'); +INSERT INTO poconsole.tag_classes (id, tag_class, description) VALUES (4, 'welltest', 'Well Test Data'); +INSERT INTO poconsole.tag_classes (id, tag_class, description) VALUES (5, 'custom', 'Custom tags'); CREATE USER 'website'@'localhost' IDENTIFIED BY 'henrypump'; GRANT ALL ON *.* TO 'website'@'localhost'; @@ -20,4 +53,4 @@ CREATE USER 'admin'@'localhost' IDENTIFIED BY 'henrypump'; GRANT ALL ON *.* to 'admin'@'localhost'; CREATE USER 'admin'@'%' IDENTIFIED BY 'henrypump'; GRANT ALL ON *.* to 'admin'@'%'; -FLUSH PRIVILEGES; \ No newline at end of file +FLUSH PRIVILEGES; diff --git a/www/dbcreate_SQLite.sql b/www/dbcreate_SQLite.sql index 37c244e..a63ee31 100644 --- a/www/dbcreate_SQLite.sql +++ b/www/dbcreate_SQLite.sql @@ -1,8 +1,18 @@ +CREATE TABLE IF NOT EXISTS tag_classes( + id INTEGER PRIMARY KEY, + tag_class TEXT, + description TEXT +); + CREATE TABLE IF NOT EXISTS tags ( id INTEGER PRIMARY KEY, 
- tagName TEXT, - vanityName TEXT, + name TEXT, + class TEXT, + tag TEXT, description TEXT, + data_type TEXT, + change_threshold REAL, + guarantee_sec INTEGER, units TEXT, minExpected REAL, maxExpected REAL, @@ -10,11 +20,11 @@ CREATE TABLE IF NOT EXISTS tags ( deleted INTEGER DEFAULT 0 ); -CREATE TABLE IF NOT EXISTS vals ( +CREATE TABLE IF NOT EXISTS tag_vals ( id INTEGER PRIMARY KEY, tagID INTEGER, val REAL, - dateAdded TIMESTAMP DEFAULT CURRENT_TIMESTAMP + dtime TIMESTAMP DEFAULT CURRENT_TIMESTAMP ); CREATE TABLE IF NOT EXISTS config ( @@ -23,3 +33,9 @@ CREATE TABLE IF NOT EXISTS config ( val TEXT, dateAdded TIMESTAMP DEFAULT CURRENT_TIMESTAMP ); + +INSERT INTO tag_classes (id, tag_class, description) VALUES (1, 'stroke', 'Stroke Information'); +INSERT INTO tag_classes (id, tag_class, description) VALUES (2, 'history', 'Historical Data'); +INSERT INTO tag_classes (id, tag_class, description) VALUES (3, 'gaugeoff', 'Gauge Off Data'); +INSERT INTO tag_classes (id, tag_class, description) VALUES (4, 'welltest', 'Well Test Data'); +INSERT INTO tag_classes (id, tag_class, description) VALUES (5, 'custom', 'Custom tags'); diff --git a/www/functions_MySQL.coffee b/www/functions_MySQL.coffee new file mode 100644 index 0000000..9f3d5cd --- /dev/null +++ b/www/functions_MySQL.coffee @@ -0,0 +1,418 @@ +# var dbFile = "/usr/db/data.db"; +dbFile = '/mnt/usb/data.db' +# var dbFile = '/Users/patrickjmcd/data.db'; + + +dString_to_sqlite = (dString) -> + ###* + * Takes a date string in the form YYYYMMDD_HHmmSS and returns it in SQLite format (YYYY-MM-DD HH:mm:SS) + * @param {String} dString + * @return {String} sqliteString + ### + + re = /(\d{4})(\d{2})(\d{2})_(\d{2})(\d{2})(\d{2})/ + fd = re.exec(dString) + if fd + sqliteString = '' + sqliteString.concat fd[1], '-', fd[2], '-', fd[3], ' ', fd[4], ':', fd[5], ':', fd[6] + else + null + + +sqlite_to_dString = (sqliteDate) -> + ###* + * Takes a sqlite date string in the form YYYY-MM-DD HH:mm:SS and returns it in format YYYYMMDD_HHmmSS + * @param {String} sqliteDate + * @return {String} dString + ### + + re = /(\d{4})-(\d{2})-(\d{2}) (\d{2}):(\d{2}):(\d{2})/ + fd = re.exec(sqliteDate) + if fd + dString = '' + dString.concat fd[1], fd[2], fd[3], '_', fd[4], fd[5], fd[6] + else + null + + +getAllTags = (pool, callback) -> + pool.getConnection (err, db)-> + query = 'SELECT * FROM tags WHERE deleted = 0' + db.query query, (err, rows, fields) -> + db.release() + if err + return callback(err, null) + console.log err + else + return callback(null, rows) + undefined + + +exports.getAllTags = (req, res) -> + req.app.locals.pool.getConnection (err, db) -> + query = 'SELECT * FROM tags WHERE deleted = 0' + db.query query, (err, rows, fields) -> + if err + res.json + status: 'error' + message: err + console.log err + else + res.json + status: 'OK' + tags: rows + undefined + + +exports.createTag = (req, res) -> + req.app.locals.pool.getConnection (err, db) -> + query = 'INSERT INTO tags (tag, units, minExpected, maxExpected, name, description, class, guarantee_sec, change_threshold) VALUES (?, ?, ?, ?, ?, ?, 5, ?, ?)' + db.query query, [req.body.tag, req.body.units, req.body.minExpected, req.body.maxExpected, req.body.name, req.body.description, req.body.guarantee_sec, req.body.change_threshold], (err, results) -> + if err + res.json + status: 'error' + message: err + console.log err + else + res.json status: 'OK' + undefined + + +exports.getTag = (req, res) -> + req.app.locals.pool.getConnection (err, db) -> + query = 'SELECT * FROM tags WHERE id = ?' 
+ db.query query, [req.params.id], (err, rows) -> + db.release() + if err + res.json + status: 'error' + message: err + console.log err + else + res.json + status: 'OK' + tags: rows + undefined + + +exports.updateTag = (req, res) -> + console.log(req.body) + req.app.locals.pool.getConnection (err, db) -> + query = 'UPDATE tags set tag = ?, units = ?, minExpected = ?, maxExpected = ?, name = ?, description = ?, guarantee_sec = ?, change_threshold = ? WHERE id = ?' + db.query query, [req.body.tag, req.body.units, req.body.minExpected, req.body.maxExpected, req.body.name, req.body.description, req.body.guarantee_sec, req.body.change_threshold, req.body.id], (err, results) -> + db.release() + if err + res.json + status: 'error' + message: err + console.log err + else + res.json + status: 'OK' + undefined + +exports.deleteTag = (req, res) -> + req.app.locals.pool.getConnection (err, db) -> + query = 'UPDATE tags SET deleted = 1 WHERE id = ?' + db.query query, [req.params.tag], (err, results) -> + db.release() + if err + res.json + status: 'error' + message: err + console.log err + else + res.json + status: 'OK' + undefined + +exports.seriesTagValues = (req, res) -> + req.app.locals.pool.getConnection (err, db) -> + query = 'SELECT * FROM tag_vals WHERE tagID = ? AND dateAdded >= DATE_SUB(NOW(), INTERVAL 1 HOUR)' + db.query query, [parseInt(req.params.tag)], (err, rows) -> + db.release() + if err + console.log err + res.json + status: 'error' + message: err + query: query + else + res.json + status: 'OK' + tag: req.params.tag + vals: rows + undefined + +exports.seriesTagValuesBetween = (req, res) -> + req.app.locals.pool.getConnection (err, db) -> + query = 'SELECT * FROM tag_vals WHERE tagID = ? AND dtime >= ? AND dtime <= ?' + db.query query, [parseInt(req.params.tag), dString_to_sqlite(req.params.startDatetime), dString_to_sqlite(req.params.endDatetime)], (err, rows) -> + db.release() + if err + console.log err + res.json + status: 'error' + message: err + query: query + else + res.json + status: 'OK' + tag: req.params.tag + startDatetime: dString_to_sqlite(req.params.startDatetime) + endDatetime: dString_to_sqlite(req.params.endDatetime) + vals: rows + undefined + +createCSVrow = (header, dataRow) -> + i = header.indexOf(dataRow.name) + csvRow = dataRow.id.toString() + ',' + dataRow.dtime + ',' + if i >= 0 + j = 2 + while j < header.length + if j == i + csvRow = csvRow + dataRow.val.toString() + ',' + else + csvRow = csvRow + ',' + j++ + csvRow = csvRow.slice(0, -1) + '\u000d' + return csvRow + return + +exports.allDataCSV = (req, res) -> + req.app.locals.pool.getConnection (err, db) -> + query = 'SELECT v.id, t.name, v.val, v.dtime FROM tags t JOIN tag_vals v ON t.id = v.tagID' + db.query query, (err, rows) -> + db.release() + if err + console.log err + res.json + status: 'error' + message: err + query: query + else + getAllTags req.app.locals.pool, (err, tags) -> + if err + console.log err + else + csvString = '' + da = [ + 'id' + 'DateAdded' + ] + tagVanityNames = tags.map((t) -> + t.name + ) + h = da.concat(tagVanityNames) + console.log h + csvString = csvString + h.join(',') + '\u000d' + i = 0 + while i < rows.length + csvString = csvString + createCSVrow(h, rows[i]) + i++ + res.set 'Content-Type', 'text/csv' + res.set 'Content-Disposition', 'attachment;filename=tagdata.csv' + res.send csvString + undefined + +exports.seriesCSV = (req, res) -> + req.app.locals.pool.getConnection (err, db) -> + query = 'SELECT v.id, t.name, v.val, v.dateAdded FROM tags t JOIN tag_vals v ON t.id = v.tagID 
WHERE tagID = ? AND v.dateAdded > DATETIME(\'now\', \'-1 HOUR\')' + db.query query, [parseInt(req.params.tag)], (err, rows) -> + db.release() + if err + console.log err + res.json + status: 'error' + message: err + query: query + else + csvString = '' + h = [ + 'id' + 'DateAdded' + rows[0].vanityName + ] + csvString = csvString + h.join(',') + '\u000d' + i = 0 + while i < rows.length + csvString = csvString + [ + rows[i].id + rows[i].dateAdded + rows[i].val + ].join(',') + '\u000d' + i++ + res.set 'Content-Type', 'text/csv' + res.set 'Content-Disposition', 'attachment;filename=tagdata.csv' + res.send csvString + undefined + +exports.seriesCSVBetween = (req, res) -> + req.app.locals.pool.getConnection (err, db) -> + query = 'SELECT v.id, t.name, v.val, v.dtime FROM tags t JOIN tag_vals v ON t.id = v.tagID WHERE tagID = ? AND v.dtime >= ? AND v.dtime <= ?' + db.query query, [parseInt(req.params.tag), dString_to_sqlite(req.params.startDatetime), dString_to_sqlite(req.params.endDatetime)], (err, rows) -> + db.release() + if err + console.log err + res.json + status: 'error' + message: err + query: query + else + csvString = '' + h = [ + 'id' + 'DateAdded' + rows[0].name + ] + csvString = csvString + h.join(',') + '\u000d' + i = 0 + while i < rows.length + csvString = csvString + [ + rows[i].id + rows[i].dtime + rows[i].val + ].join(',') + '\u000d' + i++ + res.set 'Content-Type', 'text/csv' + res.set 'Content-Disposition', 'attachment;filename=tagdata.csv' + res.send csvString + undefined + +exports.latestValueSingleTag = (req, res) -> + req.app.locals.pool.getConnection (err, db) -> + query = 'SELECT * FROM tag_vals WHERE id = (SELECT MAX(id) FROM tag_vals WHERE tagID = ?)' + db.query query, [req.params.tag], (err, rows) -> + db.release() + console.log rows + if err + res.json + status: 'error' + message: err + console.log err + else + res.json + status: 'OK' + tag_val: rows[0] + undefined + +exports.latestValueAllTags = (req, res) -> + req.app.locals.pool.getConnection (err, db) -> + query = 'SELECT v1.id as id, v1.dtime as dtime, t.id as t_id, t.name as name, t.tag as tag, v1.val as val, t.units as units, t.description as description, t.minExpected as minExpected, t.maxExpected as maxExpected FROM tag_vals v1 LEFT JOIN tags t ON t.id = v1.tagID WHERE v1.id = (SELECT v2.id FROM tag_vals v2 WHERE v2.tagID = v1.tagID ORDER BY v2.id DESC LIMIT 1)' + db.query query, [req.params.id], (err, rows) -> + db.release() + if err + res.json + status: 'error' + message: err + console.log err + else + res.json + status: 'OK' + vals: rows + undefined + +exports.checkLoggerStatus = (req, res) -> + fs = require('fs') + # var ps = require("ps-node"); + running = require('is-running') + fs.readFile '/root/tagserver.pid', (derr, ddata) -> + if derr + console.log 'Problem getting PID of tagserver' + res.json + status: 'error' + message: 'Problem getting PID of tagserver' + else + res.json + status: 'OK' + running: running(ddata) + return + return + +exports.restartLogger = (req, res) -> + exec = require('child_process').exec + exec '/etc/init.d/loggers start', (error, stdout, stderr) -> + if error + res.json + status: 'error' + message: error + else + res.json + status: 'OK' + return + return + +exports.getSetup = (req, res) -> + req.app.locals.pool.getConnection (err, db) -> + query = 'SELECT parameter, val, dateAdded FROM config GROUP BY parameter;' + db.query query, [req.params.id], (err, rows) -> + db.release() + if err + res.json + status: 'error' + message: err + console.log err + else + res.json + status: 
'OK' + config: rows + undefined + +exports.updateSetup = (req, res) -> + exec = require('child_process').exec + req.app.locals.pool.getConnection (err, db) -> + query = 'INSERT INTO config (parameter, val) VALUES (?, ?)'; + db.query query, [req.body.parameter, req.body.val], (err) -> + db.release() + if err + console.log runErr: err + res.json + status: 'error' + message: err + query: query + else + res.redirect '/#/setup' + exec '/etc/init.d/loggers stop', (error, stdout, stderr) -> + if error + console.log + status: 'error' + message: error + query: query + setTimeout (-> + exec '/etc/init.d/loggers start', (error, stdout, stderr) -> + if error + console.log + status: 'error' + message: error + query: query + ), 5000 + undefined + +exports.clearValues = (req, res) -> + if req.params.id + req.app.locals.pool.getConnection (err, db) -> + query = 'DELETE FROM tag_vals WHERE tagID = ?;'; + db.query query, [req.params.id], (err) -> + db.release() + if err + res.json + status: 'error' + message: err + console.log err + else + res.json + status: 'OK' + else + req.app.locals.pool.getConnection (err, db) -> + query = 'DELETE FROM tag_vals WHERE id >= 0;'; + db.query query, (err) -> + db.release() + if err + res.json + status: 'error' + message: err + console.log err + else + res.json + status: 'OK' + undefined diff --git a/www/functions_MySQL.js b/www/functions_MySQL.js index 462ac2d..4d50eca 100644 --- a/www/functions_MySQL.js +++ b/www/functions_MySQL.js @@ -76,5 +76,5 @@ exports.allTags = function(req, res){ }; exports.allValues = function(req, res){ - res.json(status:"error", message:"not implemented"}); + res.json({status:"error", message:"not implemented"}); }; diff --git a/www/functions_SQLite.coffee b/www/functions_SQLite.coffee new file mode 100644 index 0000000..2a3576d --- /dev/null +++ b/www/functions_SQLite.coffee @@ -0,0 +1,517 @@ +# var dbFile = "/usr/db/data.db"; +dbFile = '/mnt/usb/data.db' +# var dbFile = '/Users/patrickjmcd/data.db'; + + +dString_to_sqlite = (dString) -> + ###* + * Takes a date string in the form YYYYMMDD_HHmmSS and returns it in SQLite format (YYYY-MM-DD HH:mm:SS) + * @param {String} dString + * @return {String} sqliteString + ### + + re = /(\d{4})(\d{2})(\d{2})_(\d{2})(\d{2})(\d{2})/ + fd = re.exec(dString) + if fd + sqliteString = '' + sqliteString.concat fd[1], '-', fd[2], '-', fd[3], ' ', fd[4], ':', fd[5], ':', fd[6] + else + null + + +sqlite_to_dString = (sqliteDate) -> + ###* + * Takes a sqlite date string in the form YYYY-MM-DD HH:mm:SS and returns it in format YYYYMMDD_HHmmSS + * @param {String} sqliteDate + * @return {String} dString + ### + + re = /(\d{4})-(\d{2})-(\d{2}) (\d{2}):(\d{2}):(\d{2})/ + fd = re.exec(sqliteDate) + if fd + dString = '' + dString.concat fd[1], fd[2], fd[3], '_', fd[4], fd[5], fd[6] + else + null + + +getAllTags = (callback) -> + sqlite3 = require('sqlite3').verbose() + db = new (sqlite3.Database)(dbFile) + db.serialize -> + query = 'SELECT * FROM tags WHERE deleted = 0' + prepQuery = db.prepare(query) + prepQuery.all (err, rows) -> + prepQuery.finalize() + db.close() + if err + return callback(err, null) + console.log err + else + return callback(null, rows) + return + return + return + + +exports.getAllTags = (req, res) -> + sqlite3 = require('sqlite3').verbose() + db = new (sqlite3.Database)(dbFile) + db.serialize -> + query = 'SELECT * FROM tags WHERE deleted = 0' + prepQuery = db.prepare(query) + prepQuery.all (err, rows) -> + prepQuery.finalize() + db.close() + if err + res.json + status: 'error' + message: err + 
console.log err + else + res.json + status: 'OK' + tags: rows + return + return + return + + +exports.createTag = (req, res) -> + sqlite3 = require('sqlite3').verbose() + db = new (sqlite3.Database)(dbFile) + db.serialize -> + query = 'INSERT INTO tags (tag, units, minExpected, maxExpected, name, description, class, guarantee_sec, change_threshold) VALUES (?, ?, ?, ?, ?, ?, 5, ?, ?)' + prepQuery = db.prepare(query) + prepQuery.run req.body.tag, req.body.units, req.body.minExpected, req.body.maxExpected, req.body.name, req.body.description, req.body.guarantee_sec, req.body.change_threshold, (err) -> + prepQuery.finalize() + db.close() + if err + res.json + status: 'error' + message: err + console.log err + else + res.json status: 'OK' + return + return + return + + +exports.getTag = (req, res) -> + sqlite3 = require('sqlite3').verbose() + db = new (sqlite3.Database)(dbFile) + db.serialize -> + query = 'SELECT * FROM tags WHERE id = ?' + prepQuery = db.prepare(query) + prepQuery.all req.params.id, (err, rows) -> + prepQuery.finalize() + db.close() + if err + res.json + status: 'error' + message: err + console.log err + else + res.json + status: 'OK' + tags: rows + return + return + return + + +exports.updateTag = (req, res) -> + sqlite3 = require('sqlite3').verbose() + db = new (sqlite3.Database)(dbFile) + db.serialize -> + query = 'UPDATE tags set tag = ?, units = ?, minExpected = ?, maxExpected = ?, name = ?, description = ?, guarantee_sec = ?, change_threshold = ? WHERE id = ?' + prepQuery = db.prepare(query) + prepQuery.run req.body.tag, req.body.units, req.body.minExpected, req.body.maxExpected, req.body.name, req.body.description, req.body.guarantee_sec, req.body.change_threshold, req.body.id, (err) -> + prepQuery.finalize() + db.close() + if err + res.json + status: 'error' + message: err + console.log err + else + res.json + status: 'OK' + return + return + return + +exports.deleteTag = (req, res) -> + sqlite3 = require('sqlite3').verbose() + db = new (sqlite3.Database)(dbFile) + db.serialize -> + query = 'UPDATE tags SET deleted = 1 WHERE id = ?' + prepQuery = db.prepare(query) + prepQuery.run req.params.tag, (err) -> + prepQuery.finalize() + db.close() + if err + res.json + status: 'error' + message: err + console.log err + else + res.json + status: 'OK' + return + return + return + +exports.seriesTagValues = (req, res) -> + sqlite3 = require('sqlite3').verbose() + db = new (sqlite3.Database)(dbFile) + db.serialize -> + query = 'SELECT * FROM tag_vals WHERE tagID = ? AND dtime > DATETIME(\'now\', \'-1 HOUR\')' + prepQuery = db.prepare(query) + prepQuery.all parseInt(req.params.tag), (err, rows) -> + prepQuery.finalize() + db.close() + if err + console.log err + res.json + status: 'error' + message: err + query: query + else + res.json + status: 'OK' + tag: req.params.tag + vals: rows + return + return + return + +exports.seriesTagValuesBetween = (req, res) -> + sqlite3 = require('sqlite3').verbose() + db = new (sqlite3.Database)(dbFile) + db.serialize -> + query = 'SELECT * FROM tag_vals WHERE tagID = ? AND dtime >= DATETIME(?) 
AND dtime <= DATETIME(?)' + prepQuery = db.prepare(query) + prepQuery.all parseInt(req.params.tag), dString_to_sqlite(req.params.startDatetime), dString_to_sqlite(req.params.endDatetime), (err, rows) -> + prepQuery.finalize() + db.close() + if err + console.log err + res.json + status: 'error' + message: err + query: query + else + res.json + status: 'OK' + tag: req.params.tag + startDatetime: dString_to_sqlite(req.params.startDatetime) + endDatetime: dString_to_sqlite(req.params.endDatetime) + vals: rows + return + return + return + +createCSVrow = (header, dataRow) -> + i = header.indexOf(dataRow.name) + csvRow = dataRow.id.toString() + ',' + dataRow.dtime + ',' + if i >= 0 + j = 2 + while j < header.length + if j == i + csvRow = csvRow + dataRow.val.toString() + ',' + else + csvRow = csvRow + ',' + j++ + csvRow = csvRow.slice(0, -1) + '\u000d' + return csvRow + return + +exports.allDataCSV = (req, res) -> + sqlite3 = require('sqlite3').verbose() + db = new (sqlite3.Database)(dbFile) + db.serialize -> + query = 'SELECT v.id, t.name, v.val, v.dtime FROM tags t JOIN tag_vals v ON t.id = v.tagID' + prepQuery = db.prepare(query) + prepQuery.all (err, rows) -> + prepQuery.finalize() + db.close() + if err + console.log err + res.json + status: 'error' + message: err + query: query + else + getAllTags (err, tags) -> + if err + console.log err + else + csvString = '' + da = [ + 'id' + 'dtime' + ] + tagnames = tags.map((t) -> + t.name + ) + h = da.concat(tagnames) + console.log h + csvString = csvString + h.join(',') + '\u000d' + i = 0 + while i < rows.length + csvString = csvString + createCSVrow(h, rows[i]) + i++ + res.set 'Content-Type', 'text/csv' + res.set 'Content-Disposition', 'attachment;filename=tagdata.csv' + res.send csvString + return + return + return + return + +exports.seriesCSV = (req, res) -> + sqlite3 = require('sqlite3').verbose() + db = new (sqlite3.Database)(dbFile) + db.serialize -> + query = 'SELECT v.id, t.name, v.val, v.dtime FROM tags t JOIN tag_vals v ON t.id = v.tagID WHERE tagID = ? AND v.dtime > DATETIME(\'now\', \'-1 HOUR\')' + prepQuery = db.prepare(query) + prepQuery.all parseInt(req.params.tag), (err, rows) -> + prepQuery.finalize() + db.close() + if err + console.log err + res.json + status: 'error' + message: err + query: query + else + csvString = '' + h = [ + 'id' + 'dtime' + rows[0].name + ] + csvString = csvString + h.join(',') + '\u000d' + i = 0 + while i < rows.length + csvString = csvString + [ + rows[i].id + rows[i].dtime + rows[i].val + ].join(',') + '\u000d' + i++ + res.set 'Content-Type', 'text/csv' + res.set 'Content-Disposition', 'attachment;filename=tagdata.csv' + res.send csvString + return + return + return + +exports.seriesCSVBetween = (req, res) -> + sqlite3 = require('sqlite3').verbose() + db = new (sqlite3.Database)(dbFile) + db.serialize -> + query = 'SELECT v.id, t.name, v.val, v.dtime FROM tags t JOIN tag_vals v ON t.id = v.tagID WHERE tagID = ? AND dtime >= DATETIME(?) 
AND dtime <= DATETIME(?)' + prepQuery = db.prepare(query) + prepQuery.all parseInt(req.params.tag), dString_to_sqlite(req.params.startDatetime), dString_to_sqlite(req.params.endDatetime), (err, rows) -> + prepQuery.finalize() + db.close() + if err + console.log err + res.json + status: 'error' + message: err + query: query + else + csvString = '' + h = [ + 'id' + 'dtime' + rows[0].name + ] + csvString = csvString + h.join(',') + '\u000d' + i = 0 + while i < rows.length + csvString = csvString + [ + rows[i].id + rows[i].dtime + rows[i].val + ].join(',') + '\u000d' + i++ + res.set 'Content-Type', 'text/csv' + res.set 'Content-Disposition', 'attachment;filename=tagdata.csv' + res.send csvString + return + return + return + +exports.latestValueSingleTag = (req, res) -> + sqlite3 = require('sqlite3').verbose() + db = new (sqlite3.Database)(dbFile) + db.serialize -> + query = 'SELECT * FROM tag_vals WHERE id = (SELECT MAX(id) FROM tag_vals WHERE tagID = ?)' + prepQuery = db.prepare(query) + prepQuery.all req.params.tag, (err, rows) -> + console.log rows + prepQuery.finalize() + db.close() + if err + res.json + status: 'error' + message: err + console.log err + else + res.json + status: 'OK' + tag_val: rows[0] + return + return + return + +exports.latestValueAllTags = (req, res) -> + sqlite3 = require('sqlite3').verbose() + db = new (sqlite3.Database)(dbFile) + db.serialize -> + query = 'SELECT t.tag as tag, t.name as name, t.description as description, t.units as units, t.id as t_id, t.minExpected as min, t.maxExpected as max, MAX(v.id) as v_id, v.val as val, v.dtime as dtime FROM tag_vals v JOIN tags t ON v.tagID = t.id WHERE t.deleted = 0 GROUP BY v.tagID' + prepQuery = db.prepare(query) + prepQuery.all req.params.id, (err, rows) -> + prepQuery.finalize() + db.close() + if err + res.json + status: 'error' + message: err + console.log err + else + res.json + status: 'OK' + vals: rows + return + return + return + +exports.checkLoggerStatus = (req, res) -> + fs = require('fs') + # var ps = require("ps-node"); + running = require('is-running') + fs.readFile '/root/tagserver.pid', (derr, ddata) -> + if derr + console.log 'Problem getting PID of tagserver' + res.json + status: 'error' + message: 'Problem getting PID of tagserver' + else + res.json + status: 'OK' + running: running(ddata) + return + return + +exports.restartLogger = (req, res) -> + exec = require('child_process').exec + exec '/etc/init.d/loggers start', (error, stdout, stderr) -> + if error + res.json + status: 'error' + message: error + else + res.json + status: 'OK' + return + return + +exports.getSetup = (req, res) -> + sqlite3 = require('sqlite3').verbose() + db = new (sqlite3.Database)(dbFile) + query = 'SELECT parameter, val, dateAdded FROM config GROUP BY parameter;' + prepQuery = db.prepare(query) + prepQuery.all req.params.id, (err, rows) -> + prepQuery.finalize() + db.close() + if err + res.json + status: 'error' + message: err + console.log err + else + res.json + status: 'OK' + config: rows + return + return + +exports.updateSetup = (req, res) -> + exec = require('child_process').exec + sqlite3 = require('sqlite3').verbose() + db = new (sqlite3.Database)(dbFile) + console.log req.body.parameter, req.body.val + db.serialize -> + query = db.prepare('INSERT INTO config (parameter, val) VALUES (?, ?)') + query.run req.body.parameter, req.body.val, (err) -> + query.finalize() + db.close() + if err + console.log runErr: err + res.json + status: 'error' + message: err + query: query + else + res.redirect '/#/setup' + exec 
+
+exports.updateSetup = (req, res) ->
+  exec = require('child_process').exec
+  sqlite3 = require('sqlite3').verbose()
+  db = new (sqlite3.Database)(dbFile)
+  console.log req.body.parameter, req.body.val
+  db.serialize ->
+    query = db.prepare('INSERT INTO config (parameter, val) VALUES (?, ?)')
+    query.run req.body.parameter, req.body.val, (err) ->
+      query.finalize()
+      db.close()
+      if err
+        console.log runErr: err
+        res.json
+          status: 'error'
+          message: err
+          query: query
+      else
+        res.redirect '/#/setup'
+        exec '/etc/init.d/tagserver stop', (error, stdout, stderr) ->
+          if error
+            console.log
+              status: 'error'
+              message: error
+              query: query
+          setTimeout (->
+            exec '/etc/init.d/tagserver start', (error, stdout, stderr) ->
+              if error
+                console.log
+                  status: 'error'
+                  message: error
+                  query: query
+              return
+            return
+          ), 5000
+          return
+      return
+    return
+  return
+
+exports.clearValues = (req, res) ->
+  sqlite3 = require('sqlite3').verbose()
+  db = new (sqlite3.Database)(dbFile)
+  if req.params.id
+    db.serialize () ->
+      query = 'DELETE FROM tag_vals WHERE tagID = ?;'
+      prepQuery = db.prepare(query)
+      prepQuery.run req.params.id, (err) ->
+        prepQuery.finalize()
+        db.close()
+        if err
+          res.json
+            status: 'error'
+            message: err
+          console.log err
+        else
+          res.json
+            status: 'OK'
+  else
+    db.serialize () ->
+      query = 'DELETE FROM tag_vals WHERE id >= 0;'
+      prepQuery = db.prepare(query)
+      # no placeholders in this query, so no bind parameter is passed
+      prepQuery.run (err) ->
+        prepQuery.finalize()
+        db.close()
+        if err
+          res.json
+            status: 'error'
+            message: err
+          console.log err
+        else
+          res.json
+            status: 'OK'
+  undefined
diff --git a/www/package.json b/www/package.json
index d00fe55..df2d0bd 100644
--- a/www/package.json
+++ b/www/package.json
@@ -14,7 +14,8 @@
     "serve-favicon": "*",
     "sqlite3": "*",
     "n3-charts": "*",
-    "is-running": "*"
+    "is-running": "*",
+    "coffee-script": "*"
   },
   "devDependencies": {},
   "scripts": {
diff --git a/www/public/css/app.css b/www/public/css/app.css
new file mode 100644
index 0000000..4e87a3a
--- /dev/null
+++ b/www/public/css/app.css
@@ -0,0 +1,15 @@
+.topMargin40 {
+  margin-top: 40px;
+}
+
+.row-flex {
+  display: -webkit-box;
+  display: -webkit-flex;
+  display: -ms-flexbox;
+  display: flex;
+  flex-wrap: wrap;
+}
+.row-flex > [class*='col-'] {
+  display: flex;
+  flex-direction: column;
+}
diff --git a/www/public/js/controller.js b/www/public/js/controller.js
index 4527577..9174494 100644
--- a/www/public/js/controller.js
+++ b/www/public/js/controller.js
@@ -54,12 +54,12 @@ var sqlite_to_dString = function(sqliteDate){
 };
 
 var date_to_dString = function(inpDate){
-    var year = inpDate.getUTCFullYear().pad(4);
-    var month = (inpDate.getUTCMonth() + 1).pad(2);
-    var day = inpDate.getUTCDate().pad(2);
-    var hour = inpDate.getUTCHours().pad(2);
-    var min = inpDate.getUTCMinutes().pad(2);
-    var sec = inpDate.getUTCSeconds().pad(2);
+    var year = inpDate.getFullYear().pad(4);
+    var month = (inpDate.getMonth() + 1).pad(2);
+    var day = inpDate.getDate().pad(2);
+    var hour = inpDate.getHours().pad(2);
+    var min = inpDate.getMinutes().pad(2);
+    var sec = inpDate.getSeconds().pad(2);
     return "".concat(year, month, day, "_", hour, min, sec);
 };
 
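The controller.js hunk above moves date_to_dString from UTC to local getters so the compact stamp matches what the user picked in the browser; the .pad(n) calls rely on a Number.prototype helper defined elsewhere in controller.js. A self-contained sketch of the same formatting without that helper:

```javascript
// Self-contained equivalent of date_to_dString using local time,
// producing the compact YYYYMMDD_HHMMSS stamp that dString_to_sqlite unpacks.
function dateToDString(d) {
  function pad(n, width) {
    return ('0000' + n).slice(-width); // zero-pad without a prototype extension
  }
  return pad(d.getFullYear(), 4) + pad(d.getMonth() + 1, 2) + pad(d.getDate(), 2) +
         '_' + pad(d.getHours(), 2) + pad(d.getMinutes(), 2) + pad(d.getSeconds(), 2);
}

// e.g. new Date(2016, 3, 7, 16, 14, 59) -> "20160407_161459"
console.log(dateToDString(new Date(2016, 3, 7, 16, 14, 59)));
```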
@@ -195,11 +195,13 @@ tsCtrlrs.factory('tags',function($q, $http, $log){
 
     var createTag = function(tag){
         $http.post('/json/tag/add', {
-            tagName: tag.tagName,
-            vanityName: tag.vanityName,
+            tag: tag.tag,
+            name: tag.name,
             units: tag.units,
             minExpected: tag.minExpected,
             maxExpected: tag.maxExpected,
+            guarantee_sec: tag.guarantee_sec,
+            change_threshold: tag.change_threshold,
             description: tag.description
         }).success(function(data){
             return data;
@@ -210,11 +212,13 @@ tsCtrlrs.factory('tags',function($q, $http, $log){
         $log.info("updateTag called with "+ JSON.stringify(tag));
         $http.post('/json/tag/update', {
             id: tag.id,
-            tagName: tag.tagName,
-            vanityName: tag.vanityName,
+            tag: tag.tag,
+            name: tag.name,
             units: tag.units,
             minExpected: tag.minExpected,
             maxExpected: tag.maxExpected,
+            guarantee_sec: tag.guarantee_sec,
+            change_threshold: tag.change_threshold,
             description: tag.description
         }).success(function(data){
             return data;
@@ -234,6 +238,28 @@ tsCtrlrs.factory('tags',function($q, $http, $log){
         return deferred.promise;
     };
 
+    var clearSingleTagData = function(id){
+        var deferred = $q.defer();
+        var url = '/json/clearDatabase/' + id;
+        $http.get(url).success(function(data) {
+            deferred.resolve({
+                status: data.status
+            });
+        });
+        return deferred.promise;
+    };
+
+    var clearAllTagData = function(){
+        var deferred = $q.defer();
+        var url = '/json/clearDatabase/all';
+        $http.get(url).success(function(data) {
+            deferred.resolve({
+                status: data.status
+            });
+        });
+        return deferred.promise;
+    };
+
     return {
         getTag: getTag,
         getTagList: getTagList,
@@ -243,6 +269,8 @@ tsCtrlrs.factory('tags',function($q, $http, $log){
         createTag: createTag,
         updateTag: updateTag,
         deleteTag: deleteTag,
+        clearSingleTagData: clearSingleTagData,
+        clearAllTagData: clearAllTagData
     };
 });
 
@@ -396,6 +424,32 @@ tsCtrlrs.controller('tagsCtrl', function($scope, $route, $http, $routeParams, Pa
         });
     };
 
+    $scope.openClearTagData = function(id){
+        var getTag = tags.getTag(id);
+        getTag.then(function(data){
+            if (data.status == "OK"){
+                $scope.error = false;
+                $scope.dTagValues = data.tag;
+                $log.info("Thinking about deleting tag data with parameters: "+ JSON.stringify($scope.dTagValues));
+            } else {
+                $scope.error = data.message;
+            }
+        });
+    };
+
+    $scope.deleteTagValues = function(id){
+        var clearSingleTagData = tags.clearSingleTagData(id);
+        clearSingleTagData.then(function(data){
+            $log.info("deleting tag "+ id + " status: " + data.status);
+            if (data.status == "OK"){
+                $scope.error = false;
+                $scope.loadTagList();
+            } else {
+                $scope.error = data.message;
+            }
+        });
+    };
+
     $scope.openEditTag = function(id){
         var getTag = tags.getTag(id);
         getTag.then(function(data){
@@ -448,7 +502,7 @@ tsCtrlrs.controller('tagValsCtrl', function($scope, $route, $http, $routeParams,
             if (data.status == "OK"){
                 $scope.data = data;
                 $scope.data.vals = $scope.data.vals.map(function(x){
-                    return {id: x.id, tagID: x.tagID, val: x.val, dateAdded: new Date(x.dateAdded)};
+                    return {id: x.id, tagID: x.tagID, val: x.val, dtime: new Date(x.dtime)};
                });
 
                 $scope.error = false;
@@ -466,7 +520,7 @@ tsCtrlrs.controller('tagValsCtrl', function($scope, $route, $http, $routeParams,
             ],
             axes: {
                 x: {
-                    key: "dateAdded",
+                    key: "dtime",
                     type: "date"
                 }
             }
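tagValsCtrl now maps each value's dtime through new Date(x.dtime) so n3-charts can plot a date axis. SQLite's DATETIME() text ('YYYY-MM-DD HH:MM:SS', no timezone) is not a format the Date constructor is guaranteed to parse the same way in every browser; a defensive sketch that splits the fields out and treats the stamp as local time:

```javascript
// Parse SQLite's 'YYYY-MM-DD HH:MM:SS' text into a Date explicitly;
// new Date(string) on this non-ISO format is implementation-dependent.
function sqliteToDate(s) {
  var m = /^(\d{4})-(\d{2})-(\d{2}) (\d{2}):(\d{2}):(\d{2})$/.exec(s);
  if (!m) return null;
  return new Date(+m[1], +m[2] - 1, +m[3], +m[4], +m[5], +m[6]); // local time
}

console.log(sqliteToDate('2016-04-07 16:14:59')); // Thu Apr 07 2016 16:14:59 (local)
```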
diff --git a/www/public/partials/dashboard.html b/www/public/partials/dashboard.html
index 5624dda..ee75c16 100644
--- a/www/public/partials/dashboard.html
+++ b/www/public/partials/dashboard.html
@@ -21,14 +21,16 @@
[hunk body lost in extraction (HTML markup stripped); what survives: a new "Download All Data" link is added, each tag panel shows {{ val.dtime | date: 'medium'}} next to its "View Data" link, and the file gains its missing trailing newline]
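The dashboard's new download link rides on the same pattern the tags factory uses throughout: wrap an $http call in a $q deferred and hand the promise to the controller. A minimal sketch of that pattern, with an .error branch added so HTTP failures reject instead of hanging; binding '/json/clearDatabase/:id' to clearValues is an assumption, since the app.js route wiring is not part of this diff. Note also that clearAllTagData requests '/json/clearDatabase/all': if that path does hit the :id route, the string 'all' is truthy, so clearValues takes its single-tag branch and deletes WHERE tagID = 'all' rather than clearing everything.

```javascript
// Minimal sketch of the deferred-wrapping pattern from the tags factory
// (Angular 1.x era, hence .success/.error on the $http promise).
tsCtrlrs.factory('clearData', function ($q, $http) {
  return function (id) {
    var deferred = $q.defer();
    // assumption: '/json/clearDatabase/:id' is the route bound to clearValues
    $http.get('/json/clearDatabase/' + id)
      .success(function (data) { deferred.resolve({status: data.status}); })
      .error(function (data, status) { deferred.reject(status); }); // surface HTTP failures
    return deferred.promise;
  };
});
```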
diff --git a/www/public/partials/tagVals.html b/www/public/partials/tagVals.html
index c003cb0..9b4d876 100644
--- a/www/public/partials/tagVals.html
+++ b/www/public/partials/tagVals.html
@@ -32,19 +32,31 @@
[hunk body lost in extraction (HTML markup stripped); what survives: the heading swaps {{tag.vanityName}} for {{tag.name}}, the inline "From: ... To: ..." pickers are rebuilt as separate form rows, and a "Download Data" button submits the selected range]
@@ -57,7 +69,7 @@
[hunk body lost in extraction (HTML markup stripped); what survives: in the values table row of {{val.id}} and {{val.val}} {{tag.units}}, the timestamp cell changes from {{val.dateAdded | sqlite_to_local}} to {{val.dtime | date:'short'}}]
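The From/To pickers in tagVals.html ultimately have to reach seriesCSVBetween with the tag id and two compact datetimes in the URL. A sketch of composing that URL on the client; the path itself is a placeholder, since the route wiring (app.js) is not part of this diff:

```javascript
// Sketch: build the ranged-CSV URL from two compact stamps (the output of
// date_to_dString). '/csv/seriesBetween' is a placeholder path.
function seriesCsvUrl(tagId, fromStamp, toStamp) {
  return ['/csv/seriesBetween', tagId, fromStamp, toStamp].join('/');
}

// e.g. -> "/csv/seriesBetween/3/20160401_000000/20160407_000000"
console.log(seriesCsvUrl(3, '20160401_000000', '20160407_000000'));
```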
diff --git a/www/public/partials/tags.html b/www/public/partials/tags.html
index 20195c5..416bdf8 100644
--- a/www/public/partials/tags.html
+++ b/www/public/partials/tags.html
@@ -1,109 +1,166 @@
[hunk body lost in extraction (HTML markup stripped); the add/edit tag modals are rebuilt around the renamed fields, presumably including inputs for the tag, name, guarantee_sec, and change_threshold properties the controller now posts]
@@ -140,17 +197,19 @@
[hunk body lost in extraction (HTML markup stripped); what survives: the tag table keeps Max Expected Value and Units columns and rows of {{tag.id}}, {{tag.minExpected}}, {{tag.maxExpected}}, {{tag.units}}, with {{tag.vanityName}} replaced by {{tag.name}}]
@@ -43,7 +44,7 @@
[trailing one-line hunk lost in extraction; its file header did not survive, so it cannot be attributed]
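The rebuilt tag forms carry the two new per-tag properties, guarantee_sec and change_threshold, which the factory now posts on create and update. Read alongside the logger, the evident intent is deadband logging: store a sample when the value moves more than change_threshold away from the last stored one, and in any case at least every guarantee_sec seconds. A sketch of that rule, as one reading of the fields:

```javascript
// One reading of the new tag fields: log when the value leaves the deadband,
// or when guarantee_sec has elapsed since the last stored sample.
function shouldLog(value, lastValue, lastSendTime, changeThreshold, guaranteeSec) {
  var now = Date.now() / 1000;
  if (lastValue === null || lastSendTime === null) return true;   // nothing stored yet
  if (Math.abs(value - lastValue) > changeThreshold) return true; // left the deadband
  return (now - lastSendTime) > guaranteeSec;                     // heartbeat sample
}

console.log(shouldLog(10.6, 10.0, Date.now() / 1000, 0.5, 300)); // true: |delta| > 0.5
```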