MySQL working, need to retrofit SQLite

Patrick McDonagh
2016-04-12 18:31:14 -05:00
parent c568c2dfdb
commit c8baa20927
17 changed files with 408 additions and 608 deletions

View File

@@ -1,8 +1,8 @@
#! /bin/sh
# /etc/init.d/loggers
# /etc/init.d/tagserver
### BEGIN INIT INFO
# Provides: loggers
# Provides: tagserver
# Required-Start: $remote_fs $syslog
# Required-Stop: $remote_fs $syslog
# Default-Start: 2 3 4 5
@@ -17,22 +17,19 @@
case "$1" in
start)
echo "Starting loggers"
kill -9 $(cat /root/solar_ww.pid)
kill -9 $(cat /root/tagserver.pid)
# run application you want to start
#python /home/poconsole/src/dataLogger/alarmLogger.py &
#python /home/poconsole/src/dataLogger/dataLogger.py &
/usr/bin/python /home/poconsole/tagserver/python/tagserver_SQLite.py > /dev/null 2>&1 & echo $! > "/root/tagserver.pid"
;;
stop)
echo "Stopping loggers"
# kill application you want to stop
#killall python
kill -9 $(cat /root/tagserver.pid)
;;
*)
echo "Usage: /etc/init.d/loggers {start|stop}"
echo "Usage: /etc/init.d/tagserver {start|stop}"
exit 1
;;
esac
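
The stop logic relies entirely on the PID written at start time; /root/tagserver.pid is the only handle the script keeps on the daemon. A minimal liveness check in Python, sketched here as a hypothetical helper (it is not part of this repository), reads that file and probes the PID with signal 0:

# Hypothetical helper: probe the PID recorded by the init script above.
# Assumes the /root/tagserver.pid path used in the script.
import os

def tagserver_running(pidfile='/root/tagserver.pid'):
    try:
        with open(pidfile) as f:
            pid = int(f.read().strip())
    except (IOError, ValueError):
        return False          # no PID file, or contents unparsable
    try:
        os.kill(pid, 0)       # signal 0 delivers nothing; it only tests existence
        return True
    except OSError:
        return False          # stale PID file; the process is gone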

View File

@@ -11,4 +11,8 @@ sS'user'
p5
S'website'
p6
s.
sS'database'
p7
S'poconsole'
p8
s.

View File

@@ -3,7 +3,8 @@ import pickle
mysql_cfg = {
'host':'127.0.0.1',
'user':'website',
'password':'henrypump'
'password':'henrypump',
'database':'poconsole'
}
with open('mysql_cfg.pickle', 'wb') as pickleconfig:
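
This script writes mysql_cfg.pickle; the logger reads it back and splats the dict into the connector, as the next diff shows. For reference, the consuming side is just:

# Read side of the pickled config; the keys are exactly those written above.
import pickle

with open('mysql_cfg.pickle', 'rb') as cfgFile:
    mysql_cfg = pickle.load(cfgFile)
# mysql_cfg now carries host, user, password and database,
# ready for mysqlcon.connect(**mysql_cfg)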

View File

@@ -14,9 +14,6 @@ with open('mysql_cfg.pickle', 'rb') as cfgFile:
PLC_IP_ADDRESS = "10.10.10.3"
def readTag(addr, tag):
time.sleep(0.01)
c = ClxDriver()
@@ -36,7 +33,7 @@ def readTag(addr, tag):
class Tag():
global readTag, con, PLC_IP_ADDRESS
def __init__(self, name, tag, data_type, change_threshold, guarantee_sec, mapFn=None, plc_type='CLX'):
def __init__(self, name, tag, db_id, data_type, change_threshold, guarantee_sec, mapFn=None, plc_type='CLX', plc_ip_address='192.168.1.10'):
self.name = str(name)
self.tag = str(tag)
self.data_type = str(data_type)
@@ -48,13 +45,15 @@ class Tag():
self.mapFn = mapFn
self.plc_type = plc_type
self.readFn = readTag
self.db_id = db_id
if self.plc_type == "u800":
self.readFn = u800.readTag
self.plc_ip_address = plc_ip_address
def read(self, forceSend):
writeToDB = False
if self.tag:
v = self.readFn(PLC_IP_ADDRESS, self.tag)
v = self.readFn(self.plc_ip_address, self.tag)
if v:
if self.data_type == 'BOOL' or self.data_type == 'STRING':
val = v[0]
@@ -78,7 +77,7 @@ class Tag():
def sendToDB(self):
# TODO: Datetime
query = "INSERT INTO poconsole.tag_vals (dtime, name, val) VALUES ('{}', '{}', {})".format(time.strftime('%Y-%m-%d %H:%M:%S'), self.name, self.value)
query = "INSERT INTO poconsole.tag_vals (dtime, tagID, val) VALUES ('{}', '{}', {})".format(time.strftime('%Y-%m-%d %H:%M:%S'), self.db_id, self.value)
self.last_send_time = time.time()
print query
# TODO: CHECK ON THIS LOGIC -- with con:
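
sendToDB builds the INSERT with str.format, which leaves quoting to the caller and trusts the tag data. A sketch of the same statement with driver-side parameter binding instead, assuming a DB-API connection with the %s paramstyle such as mysql-connector (the repo's own code is as shown above):

# Same INSERT as sendToDB, sketched with parameter binding rather than
# str.format. Assumes a DB-API connection with %s placeholders.
import time

def send_to_db(con, db_id, value):
    query = ("INSERT INTO poconsole.tag_vals (dtime, tagID, val) "
             "VALUES (%s, %s, %s)")
    cur = con.cursor()
    cur.execute(query, (time.strftime('%Y-%m-%d %H:%M:%S'), db_id, value))
    con.commit()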

View File

@@ -29,7 +29,7 @@ def readTag(addr, tag):
class Tag():
global readTag, con
def __init__(self, name, tag, data_type, change_threshold, guarantee_sec, mapFn=None, plc_type='CLK'):
def __init__(self, name, tag, db_id, data_type, change_threshold, guarantee_sec, mapFn=None, plc_type='CLK', plc_ip_address='192.168.1.10'):
self.name = name
self.tag = tag
self.data_type = data_type
@@ -41,13 +41,15 @@ class Tag():
self.mapFn = mapFn
self.plc_type = plc_type
self.readFn = readTag
self.db_id = db_id
if self.plc_type == "u800":
self.readFn = u800.readTag
self.plc_ip_address = plc_ip_address
def read(self, forceSend):
writeToDB = False
if self.tag:
v = readFn(PLC_IP_ADDRESS, self.tag)
v = readFn(self.plc_ip_address, self.tag)
if v:
if self.data_type == 'BOOL' or self.data_type == 'STRING':
val = v[0]
@@ -70,7 +72,7 @@ class Tag():
self.sendToDB()
def sendToDB(self):
query = "INSERT INTO tag_vals (dtime, name, val) VALUES ({}, '{}', {})".format(time.time(), self.name, self.value)
query = "INSERT INTO tag_vals (dtime, tagID, val) VALUES ({}, '{}', {})".format(time.time(), self.db_id, self.value)
print query
# TODO: CHECK ON THIS LOGIC -- with con:
cur = con.cursor()
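
This is the SQLite twin of the class above; note it stores epoch seconds (time.time()) for dtime rather than a formatted datetime, and that read still calls the bare name readFn where the MySQL variant uses self.readFn, which would raise a NameError at runtime. Its INSERT with parameter binding, sketched for sqlite3's ? paramstyle:

# SQLite flavor of the same INSERT, using sqlite3's ? paramstyle and the
# epoch-seconds dtime the class above writes.
import time

def send_to_db(con, db_id, value):
    query = "INSERT INTO tag_vals (dtime, tagID, val) VALUES (?, ?, ?)"
    con.execute(query, (time.time(), db_id, value))
    con.commit()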

View File

@@ -19,32 +19,76 @@ if mysql_cfg:
db = mysqlcon.connect(**mysql_cfg)
tag_store = {}
configProperties = {}
def main():
db.connect()
cur = db.cursor()
query = "SELECT * FROM poconsole.tags WHERE class = 5 AND deleted = 0"
query = "SELECT * FROM tags WHERE class = 5 AND deleted = 0"
cur.execute(query)
tags = cur.fetchall()
print tags
# [(1, u'Century Counter Up', 5, u'Century_Counter_Up', u'REAL', 10.0, 3600, None, 0)]
db.disconnect()
configObj = {}
db.connect()
cur = db.cursor()
query = "SELECT parameter, val FROM config GROUP BY parameter;"
cur.execute(query)
config = cur.fetchall()
db.disconnect()
for x in config:
configObj[x[0]] = x[1]
try:
configProperties['PLC_IP_ADDRESS'] = str(configObj['ip_address'])
print("FYI, using PLC IP Address from the database {0}".format(configProperties['PLC_IP_ADDRESS']))
except KeyError:
print("FYI, there is no PLC IP Address stored in the database, defaulting to 192.168.1.10")
configProperties['PLC_IP_ADDRESS'] = "192.168.1.10"
try:
configProperties['plc_type'] = str(configObj['plc_type'])
print("FYI, using PLC Type from the database {0}".format(configProperties['plc_type']))
except KeyError:
print("FYI, there is no PLC Type stored in the database, defaulting to CLX")
configProperties['plc_type'] = "CLX"
try:
configProperties['scan_rate'] = int(configObj['scan_rate'])
print("FYI, using Scan Rate from the database {0}".format(configProperties['scan_rate']))
except KeyError:
print("FYI, there is no Scan Rate stored in the database, defaulting to 10 seconds")
configProperties['scan_rate'] = 10
try:
sa_test = str(configObj['save_all'])
if sa_test == "true":
configProperties['save_all'] = True
else:
configProperties['save_all'] = False
print("FYI, value for save_all is {0}".format(configProperties['save_all']))
except KeyError:
print("FYI, there is no save_all value stored in the database, using False")
configProperties['save_all'] = False
for t in tags:
tag_store[t[1]] = Tag(t[1], t[3], t[4], t[5], t[6], mapFn=t[7])
tag_store[t[1]] = Tag(t[1], t[3], t[0], t[5], t[6], t[7], mapFn=t[8], plc_type=configProperties['plc_type'], plc_ip_address=configProperties['PLC_IP_ADDRESS'])
PLC_IP_ADDRESS = "10.10.10.3" # MAKE THIS A db VALUE
scan_rate = 10
while True:
for tag in tag_store:
try:
tag_store[tag].read(False)
tag_store[tag].read(configProperties['save_all'])
except:
print("ERROR EVALUATING {}".format(tag))
traceback.print_exc()
time.sleep(scan_rate)
time.sleep(configProperties['scan_rate'])
if __name__ == '__main__':
main()
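
The four try/except KeyError blocks share one shape: look up a key in configObj, cast it, fall back to a default. A hypothetical helper (not in the repo) collapsing them, with the same keys, casts and defaults:

# Hypothetical helper with the same keys, casts and defaults as above.
def config_value(configObj, key, cast, default):
    try:
        return cast(configObj[key])
    except KeyError:
        return default

configProperties['PLC_IP_ADDRESS'] = config_value(configObj, 'ip_address', str, '192.168.1.10')
configProperties['plc_type'] = config_value(configObj, 'plc_type', str, 'CLX')
configProperties['scan_rate'] = config_value(configObj, 'scan_rate', int, 10)
configProperties['save_all'] = config_value(configObj, 'save_all', lambda v: str(v) == 'true', False)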

View File

@@ -1,276 +0,0 @@
#! /usr/bin/env python
# Copyright (C) 2014 Gayner Technical Services Pty Ltd
from ctypes import *
# PLC TYPES
Unknow=0
PLC=1
SLC500=2
LGX=3
# EIP DATA TYPES
PLC_BIT=1
PLC_BIT_STRING=2
PLC_BYTE_STRING=3
PLC_INTEGER=4
PLC_TIMER=5
PLC_COUNTER=6
PLC_CONTROL=7
PLC_FLOATING=8
PLC_ARRAY=9
PLC_ADRESS=15
PLC_BCD=16
# LOGIX DATA TYPES
LGX_BOOL=0xC1
LGX_BITARRAY=0xD3
LGX_SINT=0xC2
LGX_INT=0xC3
LGX_DINT=0xC4
LGX_REAL=0xCA
class Eip_Session(Structure):
_fields_ = [
('sock',c_int),
('Session_Handle', c_uint),
('Sender_ContextL',c_int),
('Sender_ContextH',c_int),
('timeout', c_int),
('references', c_int),
('Data', c_void_p),
]
class Eip_Connection(Structure):
_fields_ = [
('Eip_Session', Eip_Session),
('references', c_int),
('Data', c_void_p),
('ConnectionSerialNumber', c_uint),
('OriginatorVendorID', c_uint),
('OriginatorSerialNumber', c_int),
('OT_ConnID', c_int),
('TO_ConnID', c_int),
('packet', c_short),
('Path_size', c_byte)
]
class Eip_PLC_Read(Structure):
_fields_ = [
('type', c_int),
('Varcount', c_int),
('totalise', c_int),
('elementsize', c_int),
('mask', c_uint),
]
class TuxEIPException(Exception):
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
class TuxEIP:
def __init__(self, **kwargs):
self.__libpath = kwargs.get("libpath", "libtuxeip.dylib")
self.__tuxeip = CDLL(self.__libpath)
self.__tuxeip._cip_err_msg.restype = c_char_p
def __del__(self):
del self.__tuxeip
def OpenSession(self, slaveip_, slaveport_=44818, slavetimeout_=1000):
self.__tuxeip._OpenSession.restype = POINTER(Eip_Session)
# Convert params to C types
slaveip = c_char_p(slaveip_)
slaveport = c_int(slaveport_)
slavetimeout = c_int(slavetimeout_)
session = self.__tuxeip._OpenSession(slaveip, slaveport, slavetimeout)
#print self.__tuxeip._cip_err_msg, self.__tuxeip._cip_errno, self.__tuxeip._cip_ext_errno
if bool(session) == False:
raise TuxEIPException("Could not open session to " + str(slaveip) + ":" + str(slaveport))
return session
def RegisterSession(self, sess_):
self.__tuxeip._RegisterSession.restype = c_int
reg = self.__tuxeip._RegisterSession(sess_)
if reg != False:
raise TuxEIPException("Could not register session")
return reg
def ConnectPLCOverCNET(self, sess_, plctype_, priority_, timeoutticks_, connid_, conserial_,
vendorid_, serialnum_, timeoutmult_, rpi_, transport_, slavepath_):
# Convert params to C types
priority = c_byte(priority_)
timeoutticks = c_byte(timeoutticks_)
connid = c_uint(connid_)
conserial = c_ushort(conserial_)
vendorid = c_ushort(vendorid_)
serialnum = c_uint(serialnum_)
timeutmult = c_byte(timeoutmult_)
rpi = c_uint(rpi_)
transport = c_byte(transport_)
slavepath = c_char_p(slavepath_)
pathlength = len(slavepath_)
self.__tuxeip._ConnectPLCOverCNET.restype = POINTER(Eip_Connection)
connection = self.__tuxeip._ConnectPLCOverCNET(
sess_,
plctype_,
priority,
timeoutticks,
connid,
conserial,
vendorid,
serialnum,
timeutmult,
rpi,
transport,
slavepath,
pathlength
)
if bool(connection) == False:
raise TuxEIPException("Could not connect to CPU")
return connection
def ReadLgxData(self, sess_, conn_, var_, num_):
self.__tuxeip._ReadLgxData.restype = POINTER(Eip_PLC_Read)
readdata = self.__tuxeip._ReadLgxData(sess_, conn_, var_, num_)
if bool(readdata) == False:
raise TuxEIPException("Read data failed")
return readdata
def WriteLGXData(self, sess_, conn_, address_, datatype_, data_, num_ ):
if datatype_ == LGX_INT or datatype_ == LGX_BOOL or datatype_ == LGX_DINT or datatype_ == LGX_SINT:
data = c_int(data_)
elif datatype_ == LGX_REAL:
data = c_float(data_)
else:
raise TuxEIPException("Write data failed")
data = self.__tuxeip._WriteLgxData(sess_, conn_, address_, datatype_, byref(data), num_)
return data
def ReadLGXDataAsFloat(self, sess_, conn_, var_, num_):
data = self.ReadLgxData(sess_, conn_, var_, num_)
d = self.GetLGXValueAsFloat(data)
self.FreePLCRead(data)
return d
def ReadLGXDataAsInteger(self, sess_, conn_, var_, num_):
data = self.ReadLgxData(sess_, conn_, var_, num_)
d = self.GetLGXValueAsInteger(data)
self.FreePLCRead(data)
return d
def ReadPLCDataAsFloat(self, sess_, conn_, dhp_, routepath_, routesize_, plctype_, tns_, address_, number_):
data = self.ReadPLCData(sess_, conn_, dhp_, routepath_, routesize_, plctype_, tns_, address_, number_)
d = self.PCCC_GetValueAsFloat(data)
self.FreePLCRead(data)
return d
def ReadPLCDataAsInteger(self, sess_, conn_, dhp_, routepath_, routesize_, plctype_, tns_, address_, number_):
data = self.ReadPLCData(sess_, conn_, dhp_, routepath_, routesize_, plctype_, tns_, address_, number_)
d = self.PCCC_GetValueAsInteger(data)
self.FreePLCRead(data)
return d
def ReadPLCData(self, sess_, conn_, dhp_, routepath_, routesize_, plctype_, tns_, address_, number_):
self.__tuxeip._ReadPLCData.restype = POINTER(Eip_PLC_Read)
readdata = self.__tuxeip._ReadPLCData(sess_, conn_, dhp_, routepath_, routesize_, plctype_,
tns_, address_, number_)
if bool(readdata) == False:
raise TuxEIPException("Read data failed")
return readdata
def GetLGXValueAsFloat(self, readdata_):
if bool(readdata_) == False:
return None
self.__tuxeip._GetLGXValueAsFloat.restype = c_float
values = []
for i in range(0, readdata_.contents.Varcount):
v = self.__tuxeip._GetLGXValueAsFloat(readdata_, i)
values.append(v)
return values
def GetLGXValueAsInteger(self, readdata_):
if bool(readdata_) == False:
return None
self.__tuxeip._GetLGXValueAsInteger.restype = c_int
values = []
for i in range(0, readdata_.contents.Varcount):
v = self.__tuxeip._GetLGXValueAsInteger(readdata_, i)
values.append(v)
return values
def PCCC_GetValueAsFloat(self, readdata_):
if bool(readdata_) == False:
return None
self.__tuxeip._PCCC_GetValueAsFloat.restype = c_float
values = []
for i in range(0, readdata_.contents.Varcount):
v = self.__tuxeip._PCCC_GetValueAsFloat(readdata_, i)
values.append(v)
return values
def PCCC_GetValueAsInteger(self, readdata_):
if bool(readdata_) == False:
return None
self.__tuxeip._PCCC_GetValueAsInteger.restype = c_int
values = []
for i in range(0, readdata_.contents.Varcount):
v = self.__tuxeip._PCCC_GetValueAsInteger(readdata_, i)
values.append(v)
return values
def WritePLCData(self, sess_, conn_, dhp_, routepath_, routesize_, plctype_, tns_, address_, datatype_, data_, number_):
if datatype_ == PLC_INTEGER:
data = c_int(data_)
elif datatype_ == PLC_FLOATING:
data = c_float(data_)
else:
raise TuxEIPException("Variable type not supported" + str(datatype_))
result = self.__tuxeip._WritePLCData(sess_, conn_, dhp_, routepath_, routesize_, plctype_,
tns_, address_, datatype_, byref(data), number_)
return result
def Forward_Close(self, conn_):
self.__tuxeip._Forward_Close(conn_)
def UnRegisterSession(self, sess_):
self.__tuxeip._UnRegisterSession(sess_)
def CloseSession(self, sess_):
self.__tuxeip.CloseSession(sess_)
def FreePLCRead(self, data_):
self.__tuxeip._FreePLCRead(data_)
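
The deleted wrapper encodes a fixed life cycle: open and register a session, connect over the control net, read or write, then tear everything down in reverse. A sketch of that intended call order; every connection parameter below is a placeholder for illustration, not a value taken from this repository:

# Intended call sequence of the removed TuxEIP wrapper. All connection
# parameters here are placeholders, for illustration only.
eip = TuxEIP(libpath='libtuxeip.so')
sess = eip.OpenSession('192.168.1.10')   # port defaults to 44818
eip.RegisterSession(sess)
conn = eip.ConnectPLCOverCNET(sess, LGX, 0, 5,            # priority, timeout ticks
                              0x1234, 1, 0x1234, 42,      # conn id, serial, vendor id, serial num
                              1, 2000, 0xA3, '\x01\x00')  # timeout mult, RPI, transport, path
values = eip.ReadLGXDataAsFloat(sess, conn, 'Century_Counter_Up', 1)
eip.Forward_Close(conn)
eip.UnRegisterSession(sess)
eip.CloseSession(sess)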

Binary file not shown.

View File

@@ -25,23 +25,23 @@ if app.locals.DB_TYPE == 'MySQL'
database: 'poconsole'
app.locals.pool = mysql.createPool(db_config)
handleDisconnect = ->
console.log 'Handling db disconnect gracefully'
app.locals.db = mysql.createConnection(db_config)
app.locals.db.connect (err) ->
if err
console.log 'error when connecting to db:', err
setTimeout handleDisconnect, 2000
return
app.locals.db.on 'error', (err) ->
console.log 'db error', err
if err.code == 'PROTOCOL_CONNECTION_LOST'
handleDisconnect()
else
throw err
return
return
handleDisconnect()
# handleDisconnect = ->
# console.log 'Handling db disconnect gracefully'
# app.locals.db = mysql.createConnection(db_config)
# app.locals.db.connect (err) ->
# if err
# console.log 'error when connecting to db:', err
# setTimeout handleDisconnect, 2000
# return
# app.locals.db.on 'error', (err) ->
# console.log 'db error', err
# if err.code == 'PROTOCOL_CONNECTION_LOST'
# handleDisconnect()
# else
# throw err
# return
# return
# handleDisconnect()
else
fns = require('./functions_SQLite.coffee')
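
The MySQL branch now hands app.locals.pool to the route handlers instead of a single long-lived connection; the old reconnect-on-PROTOCOL_CONNECTION_LOST loop survives only as the commented block above. The same checkout/release discipline, sketched on the Python side with mysql-connector's pooling (an assumption; the Python logger in this repo keeps one plain connection):

# Pooling sketch in Python, assuming mysql-connector-python. The repo's
# logger itself uses a single mysqlcon.connect(**mysql_cfg) connection.
import mysql.connector.pooling

pool = mysql.connector.pooling.MySQLConnectionPool(
    pool_name='poconsole', pool_size=5,
    host='127.0.0.1', user='website', password='henrypump', database='poconsole')

con = pool.get_connection()   # checkout, like pool.getConnection in the .coffee
try:
    cur = con.cursor()
    cur.execute("SELECT * FROM tags WHERE deleted = 0")
    rows = cur.fetchall()
finally:
    con.close()               # hands the connection back to the pool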

View File

@@ -1,12 +1,13 @@
CREATE DATABASE poconsole;
CREATE TABLE IF NOT EXISTS poconsole.tag_classes(
USE poconsole;
CREATE TABLE IF NOT EXISTS tag_classes(
id int(11) NOT NULL AUTO_INCREMENT,
tag_class varchar(64),
description varchar(64),
PRIMARY KEY (id)
);
CREATE TABLE IF NOT EXISTS poconsole.tags(
CREATE TABLE IF NOT EXISTS tags(
id int(11) NOT NULL AUTO_INCREMENT,
name varchar(128),
class int(11),
@@ -24,7 +25,7 @@ CREATE TABLE IF NOT EXISTS poconsole.tags(
);
CREATE TABLE IF NOT EXISTS poconsole.tag_vals(
CREATE TABLE IF NOT EXISTS tag_vals(
id int(11) NOT NULL AUTO_INCREMENT,
dtime datetime,
tagID int,
@@ -32,7 +33,7 @@ CREATE TABLE IF NOT EXISTS poconsole.tag_vals(
PRIMARY KEY (id)
);
CREATE TABLE IF NOT EXISTS poconsole.config (
CREATE TABLE IF NOT EXISTS config (
id INT NOT NULL AUTO_INCREMENT,
parameter varchar(128),
val varchar(128),

View File

@@ -1,8 +1,18 @@
CREATE TABLE IF NOT EXISTS tag_classes(
id INTEGER PRIMARY KEY,
tag_class TEXT,
description TEXT
);
CREATE TABLE IF NOT EXISTS tags (
id INTEGER PRIMARY KEY,
tag TEXT,
name TEXT,
class TEXT,
tag TEXT,
description TEXT,
data_type TEXT,
change_threshold REAL,
guarantee_sec INTEGER,
units TEXT,
minExpected REAL,
maxExpected REAL,
@@ -10,11 +20,11 @@ CREATE TABLE IF NOT EXISTS tags (
deleted INTEGER DEFAULT 0
);
CREATE TABLE IF NOT EXISTS vals (
CREATE TABLE IF NOT EXISTS tag_vals (
id INTEGER PRIMARY KEY,
tagID INTEGER,
val REAL,
dateAdded TIMESTAMP DEFAULT CURRENT_TIMESTAMP
dtime TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);
CREATE TABLE IF NOT EXISTS config (
@@ -23,3 +33,9 @@ CREATE TABLE IF NOT EXISTS config (
val TEXT,
dateAdded TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);
INSERT INTO tag_classes (id, tag_class, description) VALUES (1, 'stroke', 'Stroke Information');
INSERT INTO tag_classes (id, tag_class, description) VALUES (2, 'history', 'Historical Data');
INSERT INTO tag_classes (id, tag_class, description) VALUES (3, 'gaugeoff', 'Gauge Off Data');
INSERT INTO tag_classes (id, tag_class, description) VALUES (4, 'welltest', 'Well Test Data');
INSERT INTO tag_classes (id, tag_class, description) VALUES (5, 'custom', 'Custom tags');
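
Because the seed INSERTs ride along with the DDL, the whole file can be applied in one call with sqlite3's executescript on a fresh database. A minimal sketch, assuming the script above is saved as schema_sqlite.sql and the database lives at poconsole.db (both hypothetical names):

# Apply the schema above on a fresh database and verify the seeded classes.
# schema_sqlite.sql and poconsole.db are hypothetical names.
import sqlite3

con = sqlite3.connect('poconsole.db')
with open('schema_sqlite.sql') as f:
    con.executescript(f.read())

for row in con.execute("SELECT id, tag_class FROM tag_classes ORDER BY id"):
    print(row)   # (1, 'stroke') through (5, 'custom')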

View File

@@ -35,131 +35,137 @@ sqlite_to_dString = (sqliteDate) ->
null
getAllTags = (callback) ->
query = 'SELECT * FROM tags WHERE deleted = 0'
req.app.locals.db.query query, (err, rows, fields) ->
if err
return callback(err, null)
console.log err
else
return callback(null, rows)
return
return
getAllTags = (pool, callback) ->
pool.getConnection (err, db)->
query = 'SELECT * FROM tags WHERE deleted = 0'
db.query query, (err, rows, fields) ->
db.release()
if err
return callback(err, null)
console.log err
else
return callback(null, rows)
undefined
exports.getAllTags = (req, res) ->
query = 'SELECT * FROM tags WHERE deleted = 0'
req.app.locals.db.query query, (err, rows, fields) ->
if err
res.json
status: 'error'
message: err
console.log err
else
res.json
status: 'OK'
tags: rows
return
return
req.app.locals.pool.getConnection (err, db) ->
query = 'SELECT * FROM tags WHERE deleted = 0'
db.query query, (err, rows, fields) ->
if err
res.json
status: 'error'
message: err
console.log err
else
res.json
status: 'OK'
tags: rows
undefined
exports.createTag = (req, res) ->
query = 'INSERT INTO tags (tagName, units, minExpected, maxExpected, vanityName, description) VALUES (?, ?, ?, ?, ?, ?)'
req.app.locals.db.query query, [req.body.tagName, req.body.units, req.body.minExpected, req.body.maxExpected, req.body.vanityName, req.body.description], (err, results) ->
if err
res.json
status: 'error'
message: err
console.log err
else
res.json status: 'OK'
return
return
req.app.locals.pool.getConnection (err, db) ->
query = 'INSERT INTO tags (tag, units, minExpected, maxExpected, name, description, class) VALUES (?, ?, ?, ?, ?, ?, 5)'
db.query query, [req.body.tag, req.body.units, req.body.minExpected, req.body.maxExpected, req.body.name, req.body.description], (err, results) ->
if err
res.json
status: 'error'
message: err
console.log err
else
res.json status: 'OK'
undefined
exports.getTag = (req, res) ->
query = 'SELECT * FROM tags WHERE id = ?'
req.app.locals.db.query query, [req.params.id], (err, rows) ->
if err
res.json
status: 'error'
message: err
console.log err
else
res.json
status: 'OK'
tags: rows
return
return
req.app.locals.pool.getConnection (err, db) ->
query = 'SELECT * FROM tags WHERE id = ?'
db.query query, [req.params.id], (err, rows) ->
db.release()
if err
res.json
status: 'error'
message: err
console.log err
else
res.json
status: 'OK'
tags: rows
undefined
exports.updateTag = (req, res) ->
query = 'UPDATE tags set tagName = ?, units = ?, minExpected = ?, maxExpected = ?, vanityName = ?, description = ? WHERE id = ?'
req.app.locals.db.query query, [req.body.tagName, req.body.units, req.body.minExpected, req.body.maxExpected, req.body.vanityName, req.body.description, req.body.id], (err, results) ->
if err
res.json
status: 'error'
message: err
console.log err
else
res.json
status: 'OK'
return
return
req.app.locals.pool.getConnection (err, db) ->
query = 'UPDATE tags set tag = ?, units = ?, minExpected = ?, maxExpected = ?, name = ?, description = ? WHERE id = ?'
db.query query, [req.body.tag, req.body.units, req.body.minExpected, req.body.maxExpected, req.body.name, req.body.description, req.body.id], (err, results) ->
db.release()
if err
res.json
status: 'error'
message: err
console.log err
else
res.json
status: 'OK'
undefined
exports.deleteTag = (req, res) ->
query = 'UPDATE tags SET deleted = 1 WHERE id = ?'
req.app.locals.db.query query, [req.params.tag], (err, results) ->
if err
res.json
status: 'error'
message: err
console.log err
else
res.json
status: 'OK'
return
return
req.app.locals.pool.getConnection (err, db) ->
query = 'UPDATE tags SET deleted = 1 WHERE id = ?'
db.query query, [req.params.tag], (err, results) ->
db.release()
if err
res.json
status: 'error'
message: err
console.log err
else
res.json
status: 'OK'
undefined
exports.seriesTagValues = (req, res) ->
query = 'SELECT * FROM tag_vals WHERE tagID = ? AND dateAdded >= DATE_SUB(NOW(), INTERVAL 1 HOUR)'
req.app.locals.db.query query, [parseInt(req.params.tag)], (err, rows) ->
if err
console.log err
res.json
status: 'error'
message: err
query: query
else
res.json
status: 'OK'
tag: req.params.tag
vals: rows
return
return
req.app.locals.pool.getConnection (err, db) ->
query = 'SELECT * FROM tag_vals WHERE tagID = ? AND dateAdded >= DATE_SUB(NOW(), INTERVAL 1 HOUR)'
db.query query, [parseInt(req.params.tag)], (err, rows) ->
db.release()
if err
console.log err
res.json
status: 'error'
message: err
query: query
else
res.json
status: 'OK'
tag: req.params.tag
vals: rows
undefined
exports.seriesTagValuesBetween = (req, res) ->
query = 'SELECT * FROM tag_vals WHERE tagID = ? AND dtime >= ? AND dtime <= ?'
req.app.locals.db.query query, [parseInt(req.params.tag), dString_to_sqlite(req.params.startDatetime), dString_to_sqlite(req.params.endDatetime)], (err, rows) ->
if err
console.log err
res.json
status: 'error'
message: err
query: query
else
res.json
status: 'OK'
tag: req.params.tag
startDatetime: dString_to_sqlite(req.params.startDatetime)
endDatetime: dString_to_sqlite(req.params.endDatetime)
vals: rows
return
return
req.app.locals.pool.getConnection (err, db) ->
query = 'SELECT * FROM tag_vals WHERE tagID = ? AND dtime >= ? AND dtime <= ?'
db.query query, [parseInt(req.params.tag), dString_to_sqlite(req.params.startDatetime), dString_to_sqlite(req.params.endDatetime)], (err, rows) ->
db.release()
if err
console.log err
res.json
status: 'error'
message: err
query: query
else
res.json
status: 'OK'
tag: req.params.tag
startDatetime: dString_to_sqlite(req.params.startDatetime)
endDatetime: dString_to_sqlite(req.params.endDatetime)
vals: rows
undefined
createCSVrow = (header, dataRow) ->
i = header.indexOf(dataRow.vanityName)
csvRow = dataRow.id.toString() + ',' + dataRow.dateAdded + ','
i = header.indexOf(dataRow.name)
csvRow = dataRow.id.toString() + ',' + dataRow.dtime + ','
if i >= 0
j = 2
while j < header.length
@@ -173,133 +179,137 @@ createCSVrow = (header, dataRow) ->
return
exports.allDataCSV = (req, res) ->
query = 'SELECT v.id, t.vanityName, v.val, v.dateAdded FROM tags t JOIN tag_vals v ON t.id = v.tagID'
req.app.locals.db.query query, (err, rows) ->
if err
console.log err
res.json
status: 'error'
message: err
query: query
else
getAllTags (err, tags) ->
if err
console.log err
else
csvString = ''
da = [
'id'
'DateAdded'
]
tagVanityNames = tags.map((t) ->
t.vanityName
)
h = da.concat(tagVanityNames)
console.log h
csvString = csvString + h.join(',') + '\u000d'
i = 0
while i < rows.length
csvString = csvString + createCSVrow(h, rows[i])
i++
res.set 'Content-Type', 'text/csv'
res.set 'Content-Disposition', 'attachment;filename=tagdata.csv'
res.send csvString
return
return
return
req.app.locals.pool.getConnection (err, db) ->
query = 'SELECT v.id, t.name, v.val, v.dtime FROM tags t JOIN tag_vals v ON t.id = v.tagID'
db.query query, (err, rows) ->
db.release()
if err
console.log err
res.json
status: 'error'
message: err
query: query
else
getAllTags req.app.locals.pool, (err, tags) ->
if err
console.log err
else
csvString = ''
da = [
'id'
'DateAdded'
]
tagVanityNames = tags.map((t) ->
t.name
)
h = da.concat(tagVanityNames)
console.log h
csvString = csvString + h.join(',') + '\u000d'
i = 0
while i < rows.length
csvString = csvString + createCSVrow(h, rows[i])
i++
res.set 'Content-Type', 'text/csv'
res.set 'Content-Disposition', 'attachment;filename=tagdata.csv'
res.send csvString
undefined
exports.seriesCSV = (req, res) ->
query = 'SELECT v.id, t.vanityName, v.val, v.dateAdded FROM tags t JOIN tag_vals v ON t.id = v.tagID WHERE tagID = ? AND v.dateAdded > DATETIME(\'now\', \'-1 HOUR\')'
req.app.locals.db.query query, [parseInt(req.params.tag)], (err, rows) ->
if err
console.log err
res.json
status: 'error'
message: err
query: query
else
csvString = ''
h = [
'id'
'DateAdded'
rows[0].vanityName
]
csvString = csvString + h.join(',') + '\u000d'
i = 0
while i < rows.length
csvString = csvString + [
rows[i].id
rows[i].dateAdded
rows[i].val
].join(',') + '\u000d'
i++
res.set 'Content-Type', 'text/csv'
res.set 'Content-Disposition', 'attachment;filename=tagdata.csv'
res.send csvString
return
return
req.app.locals.pool.getConnection (err, db) ->
query = 'SELECT v.id, t.name, v.val, v.dateAdded FROM tags t JOIN tag_vals v ON t.id = v.tagID WHERE tagID = ? AND v.dateAdded > DATETIME(\'now\', \'-1 HOUR\')'
db.query query, [parseInt(req.params.tag)], (err, rows) ->
db.release()
if err
console.log err
res.json
status: 'error'
message: err
query: query
else
csvString = ''
h = [
'id'
'DateAdded'
rows[0].vanityName
]
csvString = csvString + h.join(',') + '\u000d'
i = 0
while i < rows.length
csvString = csvString + [
rows[i].id
rows[i].dateAdded
rows[i].val
].join(',') + '\u000d'
i++
res.set 'Content-Type', 'text/csv'
res.set 'Content-Disposition', 'attachment;filename=tagdata.csv'
res.send csvString
undefined
exports.seriesCSVBetween = (req, res) ->
query = 'SELECT v.id, t.vanityName, v.val, v.dateAdded FROM tags t JOIN tag_vals v ON t.id = v.tagID WHERE tagID = ? AND dateAdded >= DATETIME(?) AND dateAdded <= DATETIME(?)'
req.app.locals.db.query query, [parseInt(req.params.tag), dString_to_sqlite(req.params.startDatetime), dString_to_sqlite(req.params.endDatetime)], (err, rows) ->
if err
console.log err
res.json
status: 'error'
message: err
query: query
else
csvString = ''
h = [
'id'
'DateAdded'
rows[0].vanityName
]
csvString = csvString + h.join(',') + '\u000d'
i = 0
while i < rows.length
csvString = csvString + [
rows[i].id
rows[i].dateAdded
rows[i].val
].join(',') + '\u000d'
i++
res.set 'Content-Type', 'text/csv'
res.set 'Content-Disposition', 'attachment;filename=tagdata.csv'
res.send csvString
return
return
req.app.locals.pool.getConnection (err, db) ->
query = 'SELECT v.id, t.name, v.val, v.dtime FROM tags t JOIN tag_vals v ON t.id = v.tagID WHERE tagID = ? AND v.dtime >= ? AND v.dtime <= ?'
db.query query, [parseInt(req.params.tag), dString_to_sqlite(req.params.startDatetime), dString_to_sqlite(req.params.endDatetime)], (err, rows) ->
db.release()
if err
console.log err
res.json
status: 'error'
message: err
query: query
else
csvString = ''
h = [
'id'
'DateAdded'
rows[0].name
]
csvString = csvString + h.join(',') + '\u000d'
i = 0
while i < rows.length
csvString = csvString + [
rows[i].id
rows[i].dtime
rows[i].val
].join(',') + '\u000d'
i++
res.set 'Content-Type', 'text/csv'
res.set 'Content-Disposition', 'attachment;filename=tagdata.csv'
res.send csvString
undefined
exports.latestValueSingleTag = (req, res) ->
query = 'SELECT * FROM tag_vals WHERE id = (SELECT MAX(id) FROM tag_vals WHERE tagID = ?)'
req.app.locals.db.query query, [req.params.tag], (err, rows) ->
console.log rows
if err
res.json
status: 'error'
message: err
console.log err
else
res.json
status: 'OK'
tag_val: rows[0]
return
return
req.app.locals.pool.getConnection (err, db) ->
query = 'SELECT * FROM tag_vals WHERE id = (SELECT MAX(id) FROM tag_vals WHERE tagID = ?)'
db.query query, [req.params.tag], (err, rows) ->
db.release()
console.log rows
if err
res.json
status: 'error'
message: err
console.log err
else
res.json
status: 'OK'
tag_val: rows[0]
undefined
exports.latestValueAllTags = (req, res) ->
query = 'SELECT t.tag as tagName, t.name as vanityName, t.units as units, t.id as t_id, t.minExpected as min, t.maxExpected as max, MAX(v.id) as v_id, v.val as val, v.dtime as dtime FROM tag_vals v JOIN tags t ON v.tagID = t.id WHERE t.deleted = 0 GROUP BY v.tagID'
req.app.locals.db.query query, [req.params.id], (err, rows) ->
if err
res.json
status: 'error'
message: err
console.log err
else
res.json
status: 'OK'
vals: rows
return
return
req.app.locals.pool.getConnection (err, db) ->
query = 'SELECT v1.id as id, v1.dtime as dtime, t.id as t_id, t.name as name, t.tag as tag, v1.val as val, t.units as units, t.description as description, t.minExpected as minExpected, t.maxExpected as maxExpected FROM tag_vals v1 LEFT JOIN tags t ON t.id = v1.tagID WHERE v1.id = (SELECT v2.id FROM tag_vals v2 WHERE v2.tagID = v1.tagID ORDER BY v2.id DESC LIMIT 1)'
db.query query, [req.params.id], (err, rows) ->
db.release()
if err
res.json
status: 'error'
message: err
console.log err
else
res.json
status: 'OK'
vals: rows
undefined
exports.checkLoggerStatus = (req, res) ->
fs = require('fs')
@@ -332,49 +342,47 @@ exports.restartLogger = (req, res) ->
return
exports.getSetup = (req, res) ->
query = 'SELECT parameter, val, dateAdded FROM config GROUP BY parameter;'
req.app.locals.db.query query, [req.params.id], (err, rows) ->
if err
res.json
status: 'error'
message: err
console.log err
else
res.json
status: 'OK'
config: rows
return
return
req.app.locals.pool.getConnection (err, db) ->
query = 'SELECT parameter, val, dateAdded FROM config GROUP BY parameter;'
db.query query, [req.params.id], (err, rows) ->
db.release()
if err
res.json
status: 'error'
message: err
console.log err
else
res.json
status: 'OK'
config: rows
undefined
exports.updateSetup = (req, res) ->
exec = require('child_process').exec
query = 'INSERT INTO config (parameter, val) VALUES (?, ?)';
req.app.locals.db.query query, [req.body.parameter, req.body.val], (err) ->
if err
console.log runErr: err
res.json
status: 'error'
message: err
query: query
else
res.redirect '/#/setup'
exec '/etc/init.d/loggers stop', (error, stdout, stderr) ->
if error
console.log
status: 'error'
message: error
query: query
setTimeout (->
exec '/etc/init.d/loggers start', (error, stdout, stderr) ->
if error
console.log
status: 'error'
message: error
query: query
return
return
), 5000
return
return
return
return
req.app.locals.pool.getConnection (err, db) ->
query = 'INSERT INTO config (parameter, val) VALUES (?, ?)';
db.query query, [req.body.parameter, req.body.val], (err) ->
db.release()
if err
console.log runErr: err
res.json
status: 'error'
message: err
query: query
else
res.redirect '/#/setup'
exec '/etc/init.d/loggers stop', (error, stdout, stderr) ->
if error
console.log
status: 'error'
message: error
query: query
setTimeout (->
exec '/etc/init.d/loggers start', (error, stdout, stderr) ->
if error
console.log
status: 'error'
message: error
query: query
), 5000
undefined
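
Two things stand out in this rewrite. First, every handler now follows the pool.getConnection / db.release() bracket seen above. Second, latestValueAllTags replaces the MySQL GROUP BY trick with a correlated subquery that SQLite evaluates per row: keep v1 only when its id is the newest id for that tagID (the greatest-n-per-group pattern). One hunk was missed in the rename, though: the new seriesCSV still selects v.dateAdded and reads rows[0].vanityName, columns that became dtime and name elsewhere in this commit. The subquery pattern itself, demonstrated against sqlite3 with throwaway data:

# Greatest-n-per-group, as used by latestValueAllTags, run against sqlite3
# with throwaway in-memory data.
import sqlite3

con = sqlite3.connect(':memory:')
con.executescript("""
CREATE TABLE tag_vals (id INTEGER PRIMARY KEY, tagID INTEGER, val REAL);
INSERT INTO tag_vals (tagID, val) VALUES (1, 10.0), (2, 3.5), (1, 11.2), (2, 4.1);
""")
rows = con.execute("""
SELECT v1.tagID, v1.val FROM tag_vals v1
WHERE v1.id = (SELECT v2.id FROM tag_vals v2
               WHERE v2.tagID = v1.tagID ORDER BY v2.id DESC LIMIT 1)
""").fetchall()
print(rows)   # [(1, 11.2), (2, 4.1)] -- one newest row per tagID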


www/public/css/app.css Normal file
View File

@@ -0,0 +1 @@

View File

@@ -54,12 +54,12 @@ var sqlite_to_dString = function(sqliteDate){
};
var date_to_dString = function(inpDate){
var year = inpDate.getUTCFullYear().pad(4);
var month = (inpDate.getUTCMonth() + 1).pad(2);
var day = inpDate.getUTCDate().pad(2);
var hour = inpDate.getUTCHours().pad(2);
var min = inpDate.getUTCMinutes().pad(2);
var sec = inpDate.getUTCSeconds().pad(2);
var year = inpDate.getFullYear().pad(4);
var month = (inpDate.getMonth() + 1).pad(2);
var day = inpDate.getDate().pad(2);
var hour = inpDate.getHours().pad(2);
var min = inpDate.getMinutes().pad(2);
var sec = inpDate.getSeconds().pad(2);
return "".concat(year, month, day, "_", hour, min, sec);
};
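
date_to_dString drops the UTC getters for local-time ones, so the stamp now reflects the browser's clock; the format is YYYYMMDD_HHMMSS (.pad is presumably a helper defined elsewhere in this file, not a standard Number method). The same stamp in Python, for illustration:

# The YYYYMMDD_HHMMSS stamp produced by date_to_dString, in Python,
# using local time like the updated JS.
from datetime import datetime

def date_to_dstring(d=None):
    return (d or datetime.now()).strftime('%Y%m%d_%H%M%S')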
@@ -195,8 +195,8 @@ tsCtrlrs.factory('tags',function($q, $http, $log){
var createTag = function(tag){
$http.post('/json/tag/add', {
tagName: tag.tagName,
vanityName: tag.vanityName,
tag: tag.tag,
name: tag.name,
units: tag.units,
minExpected: tag.minExpected,
maxExpected: tag.maxExpected,
@@ -210,8 +210,8 @@ tsCtrlrs.factory('tags',function($q, $http, $log){
$log.info("updateTag called with "+ JSON.stringify(tag));
$http.post('/json/tag/update', {
id: tag.id,
tagName: tag.tagName,
vanityName: tag.vanityName,
tag: tag.tag,
name: tag.name,
units: tag.units,
minExpected: tag.minExpected,
maxExpected: tag.maxExpected,
@@ -448,7 +448,7 @@ tsCtrlrs.controller('tagValsCtrl', function($scope, $route, $http, $routeParams,
if (data.status == "OK"){
$scope.data = data;
$scope.data.vals = $scope.data.vals.map(function(x){
return {id: x.id, tagID: x.tagID, val: x.val, dateAdded: new Date(x.dateAdded)};
return {id: x.id, tagID: x.tagID, val: x.val, dtime: new Date(x.dtime)};
});
$scope.error = false;
@@ -466,7 +466,7 @@ tsCtrlrs.controller('tagValsCtrl', function($scope, $route, $http, $routeParams,
],
axes: {
x: {
key: "dateAdded",
key: "dtime",
type: "date"
}
}

View File

@@ -21,14 +21,16 @@
<div ng-if="!error" class="container">
<div class="row">
<button ng-click="loadDashboard()" class="btn btn-large btn-success"><i class="fa fa-refresh"></i> Reload Dashboard</button>
<a href="/json/csv/all" class="btn btn-large btn-primary"><i class="fa fa-download"></i> Download All Data</a>
<div ng-repeat="val in vals">
<div class="col-md-4" style="height:200px; margin-bottom:40px;">
<just-gage id="{{val.vanityName}}" min='val.min' max='val.max' value='val.val' options="{label:val.units,title:val.vanityName, decimals:2, refreshAnimationType:'bounce', startAnimationType:'bounce'}"></just-gage>
<just-gage id="{{val.name}}" min='val.min' max='val.max' value='val.val' options="{label:val.units,title:val.name, decimals:2, refreshAnimationType:'bounce', startAnimationType:'bounce'}"></just-gage>
<div style="text-align:center">
<h5>{{ val.dtime | date: 'medium'}}</h5>
<a href="/#/tag/{{val.t_id}}" class="btn btn-large btn-primary"><i class="fa fa-line-chart"></i> View Data</a>
</div>
</div>
</div>
</div>
</div>
</div>
</div>

View File

@@ -57,7 +57,7 @@
<tr ng-repeat="val in data.vals">
<td>{{val.id}}</td>
<td>{{val.val}} {{tag.units}}</td>
<td>{{val.dateAdded | sqlite_to_local}}</td>
<td>{{val.dtime | date:'short'}}</td>
</tr>
</tbody>
</table>

View File

@@ -25,6 +25,7 @@
<script src="/bower_components/ngQuickDate/dist/ng-quick-date.min.js"></script>
<link rel="stylesheet" href="/bower_components/ngQuickDate/dist/ng-quick-date.css">
<link rel="stylesheet" href="/bower_components/ngQuickDate/dist/ng-quick-date-default-theme.css">
<link rel="stylesheet" href="/css/app.css">
<script src="/js/router.js"></script>
<script src="/js/controller.js"></script>
@@ -43,7 +44,7 @@
<span class="icon-bar"></span>
<span class="icon-bar"></span>
</button>
<a class="navbar-brand" rel="home" href="/" title="Henry Pump">
<a class="navbar-brand" rel="home" href="/#/" title="Henry Pump">
<img style="max-width:100px; "src="/img/logo.png">
</a>
</div>