39 Commits

Author SHA1 Message Date
Patrick McDonagh
4a867430ac Add CRUD for devices 2016-04-26 17:06:41 -05:00
Patrick McDonagh
a277e47fd3 Add tag error fix, db optimization 2016-04-26 13:30:30 -05:00
Patrick McDonagh
30ec348d48 Add MySQL database model 2016-04-26 13:29:46 -05:00
Patrick McDonagh
8f262e1b23 Updated tag submodule. now includes analog and bit alarms 2016-04-20 12:15:20 -05:00
Patrick McDonagh
5a7d40088c fixed a few typos 2016-04-20 11:48:32 -05:00
Patrick McDonagh
4c8ff3f4a5 update tag submodule 2016-04-19 13:13:25 -05:00
Patrick McDonagh
c90bf70360 fix db create function 2016-04-19 13:12:53 -05:00
Patrick McDonagh
f324c33f6b changed to be location agnostic 2016-04-18 15:56:20 -05:00
Patrick McDonagh
0b1104bdeb Updated to store data_type 2016-04-18 14:54:14 -05:00
Patrick McDonagh
5bb8f1bdfd fixed pycomm_micro cip_base imports 2016-04-18 14:08:44 -05:00
Patrick McDonagh
f5ab8c21b3 fixed inner import 2016-04-18 14:02:46 -05:00
Patrick McDonagh
1182c9627d Fixed pycomm_micro import 2016-04-18 13:52:20 -05:00
Patrick McDonagh
0dbb19076b Updated Tag 2016-04-18 10:09:39 -05:00
Patrick McDonagh
9dadb25011 fixed .gitmodules 2016-04-18 09:56:17 -05:00
Patrick McDonagh
e0f48be5f0 Updated Tag 2016-04-18 09:08:50 -05:00
Patrick McDonagh
066da70b71 Fixed init script 2016-04-15 18:14:10 -05:00
Patrick McDonagh
eb69640a11 Updated tag submodule again 2016-04-15 18:12:29 -05:00
Patrick McDonagh
4bb40dc85d Updated Tag submodule 2016-04-15 18:10:22 -05:00
Patrick McDonagh
cb6f138859 Updated to work with tag submodule 2016-04-15 18:06:50 -05:00
Patrick McDonagh
ff5f3bdf8b Removed submodule micro800 2016-04-15 18:05:32 -05:00
Patrick McDonagh
96467142b9 Updated submodule micro800 2016-04-15 18:02:42 -05:00
Patrick McDonagh
b65f49fd69 Moved micro800 and tag modules to their own repos, included submodules 2016-04-15 18:00:21 -05:00
Patrick McDonagh
6fdc513386 Read was defaulting to False 2016-04-15 17:35:14 -05:00
Patrick McDonagh
61e0c4a5f5 val was used before assigned 2016-04-15 17:33:12 -05:00
Patrick McDonagh
8c5e328eb3 Use less plc-specific language 2016-04-15 17:29:56 -05:00
Patrick McDonagh
0295bb6d86 Allow for forcing send or skip of database, read() returns value 2016-04-15 17:24:49 -05:00
Patrick McDonagh
2cb7c0f8da Fix file names for git repo 2016-04-14 15:42:44 -05:00
Patrick McDonagh
723ae7494a Removed unused files, fixed dashboard 2016-04-14 12:58:51 -05:00
Patrick McDonagh
97538ebb70 Merge pull request #1 in POCLOUD/tag-server from MySQL to master
MySQL and SQLite working

* commit '667f6540773b6a556171c20a16996b32d334fc05':
  Able to switch back and forth between MySQL and SQLite
  Moved location to /root/tagserver
  MySQL working, need to retro SQLite
  Converted to CoffeeScript, data in MySQL
  Added connect statement to connect to db before trying to INSERT
  reading tags from mysql and writing values to mysql
  added Tag classes for MySQL and SQLite
  Updated gitIgnore
2016-04-13 22:19:04 +00:00
Patrick McDonagh
667f654077 Able to switch back and forth between MySQL and SQLite 2016-04-13 17:17:15 -05:00
Patrick McDonagh
83c63bde31 Moved location to /root/tagserver 2016-04-12 18:38:44 -05:00
Patrick McDonagh
c8baa20927 MySQL working, need to retro SQLite 2016-04-12 18:31:14 -05:00
Patrick McDonagh
c568c2dfdb Converted to CoffeeScript, data in MySQL 2016-04-12 12:12:10 -05:00
Patrick McDonagh
40a3a0d304 Added connect statement to connect to db before trying to INSERT 2016-04-11 15:48:17 -05:00
Patrick McDonagh
528b62bcbb reading tags from mysql and writing values to mysql 2016-04-07 17:06:55 -05:00
Patrick McDonagh
f0f94aaa0f added Tag classes for MySQL and SQLite 2016-04-07 14:39:12 -05:00
Patrick McDonagh
d9d83789c2 Updated gitIgnore 2016-04-07 14:35:42 -05:00
Patrick McDonagh
6294a28462 Merge branch 'develop'
* develop:
  removed node modules and bower components from develop branch

Conflicts:
	.gitignore
2016-03-23 09:57:30 -05:00
Patrick McDonagh
22650d7688 Remove bower components and node modules from the repo 2016-03-23 09:52:17 -05:00
49 changed files with 1882 additions and 4018 deletions

5
.gitignore vendored
View File

@@ -1,3 +1,6 @@
*.pyc
*/bower_components/*
*/node_modules/*
*/bower_components/*
*.log
sftp-config.json
.remote-sync.json

3
.gitmodules vendored Normal file
View File

@@ -0,0 +1,3 @@
[submodule "tag"]
path = tag
url = http://patrickjmcd@bitbucket.poconsole.net/scm/poconsole/tag.git

View File

@@ -1,14 +1,10 @@
{
"transport": "scp",
"uploadOnSave": true,
"transport": "ftp",
"uploadOnSave": false,
"useAtomicWrites": false,
"deleteLocal": false,
"hostname": "10.10.10.6",
"port": "22",
"target": "/home/pi/tagserver/",
"ignore": [
".remote-sync.json",
".git/**"
],
"username": "pi",
"password": "raspberry"
}
]
}

BIN
TagServer.mwb Normal file

Binary file not shown.

View File

@@ -1,8 +1,8 @@
#! /bin/sh
# /etc/init.d/loggers
# /etc/init.d/tagserver
### BEGIN INIT INFO
# Provides: loggers
# Provides: tagserver
# Required-Start: $remote_fs $syslog
# Required-Stop: $remote_fs $syslog
# Default-Start: 2 3 4 5
@@ -17,22 +17,19 @@
case "$1" in
start)
echo "Starting loggers"
kill -9 $(cat /root/solar_ww.pid)
kill -9 $(cat /root/tagserver.pid)
# run application you want to start
#python /home/poconsole/src/dataLogger/alarmLogger.py &
#python /home/poconsole/src/dataLogger/dataLogger.py &
/usr/bin/python /home/poconsole/tagserver/python/tagserver_SQLite.py > /dev/null 2>&1 & echo $! > "/root/tagserver.pid"
/usr/bin/python /root/tag-server/tagserver_MySQL.py > /dev/null 2>&1 & echo $! > "/root/tagserver.pid"
;;
stop)
echo "Stopping loggers"
# kill application you want to stop
#killall python
kill -9 $(cat /root/tagserver.pid)
;;
*)
echo "Usage: /etc/init.d/loggers {start|stop}"
echo "Usage: /etc/init.d/tagserver {start|stop}"
exit 1
;;
esac

View File

@@ -1,40 +0,0 @@
#! /bin/sh
# /etc/init.d/website
### BEGIN INIT INFO
# Provides: website
# Required-Start: $remote_fs $syslog
# Required-Stop: $remote_fs $syslog
# Default-Start: 2 3 4 5
# Default-Stop: 0 1 6
# Short-Description: Simple script to start a program at boot
# Description: A simple script from www.stuffaboutcode.com which will start / stop a program a boot / shutdown.
### END INIT INFO
# If you want a command to always run, put it here
# Carry out specific functions when asked to by the system
case "$1" in
start)
echo "Starting website"
# run application you want to start
# /home/pi/runWebsite.sh
if [ $(ps aux | grep $USER | grep node | grep -v grep | wc -l | tr -s "\n") -eq 0 ]
then
export NODE_ENV=production
export PATH=/usr/local/bin:$PATH
forever start /home/poconsole/tagserver/www/app.js > /dev/null
fi
;;
stop)
echo "Stopping website"
# kill application you want to stop
forever stopall
;;
*)
echo "Usage: /etc/init.d/website {start|stop}"
exit 1
;;
esac
exit 0

18
mysql_cfg.pickle Normal file
View File

@@ -0,0 +1,18 @@
(dp0
S'host'
p1
S'127.0.0.1'
p2
sS'password'
p3
S'henrypump'
p4
sS'user'
p5
S'website'
p6
sS'database'
p7
S'poconsole'
p8
s.

11
pickle_mysql_config.py Normal file
View File

@@ -0,0 +1,11 @@
import pickle
mysql_cfg = {
'host': '127.0.0.1',
'user': 'website',
'password': 'henrypump',
'database': 'poconsole'
}
with open('mysql_cfg.pickle', 'wb') as pickleconfig:
pickle.dump(mysql_cfg, pickleconfig)

View File

@@ -1,94 +0,0 @@
from pycomm_micro.ab_comm.clx import Driver as u800Driver
import logging
import sys
def readMicroTag(addr, tag):
logging.basicConfig(
filename="u800Driver.log",
format="%(levelname)-10s %(asctime)s %(message)s",
level=logging.DEBUG
)
c = u800Driver()
if c.open(addr):
try:
v = c.read_tag(tag)
# print(v)
return v
except Exception as e:
err = c.get_status()
c.close()
print err
pass
c.close()
def getTagType(addr, tag):
logging.basicConfig(
filename="u800Driver.log",
format="%(levelname)-10s %(asctime)s %(message)s",
level=logging.DEBUG
)
c = u800Driver()
if c.open(addr):
try:
return c.read_tag(tag)[1]
except Exception as e:
err = c.get_status()
c.close()
print err
pass
c.close()
def writeMicroTag(addr, tag, val):
logging.basicConfig(
filename="u800Driver.log",
format="%(levelname)-10s %(asctime)s %(message)s",
level=logging.DEBUG
)
c = u800Driver()
if c.open(addr):
try:
#typ = getTagType(addr, tag)
cv = c.read_tag(tag)
wt = c.write_tag(tag, val, cv[1])
# print(wt)
return wt
except Exception as e:
err = c.get_status()
c.close()
print err
pass
c.close()
def readMicroTagList(addr, tList):
logging.basicConfig(
filename="u800Driver.log",
format="%(levelname)-10s %(asctime)s %(message)s",
level=logging.DEBUG
)
c = u800Driver()
if c.open(addr):
vals = []
try:
for t in tList:
v = c.read_tag(t)
vals.append({"tag":t,"val":v[0], "type":v[1]})
# print(v)
# print("{0} - {1}".format(t, v))
except Exception as e:
err = c.get_status()
c.close()
print err
pass
c.close()
return vals
if __name__ == '__main__':
if len(sys.argv) > 2:
print(readMicroTag(sys.argv[1], sys.argv[2]))
else:
print ("Did not pass a target and tag name.")

View File

@@ -1 +0,0 @@
pycomm.ab_comm.clx WARNING 2016-01-25 14:45:25,488 (5, 'forward_close returned False')

View File

@@ -1 +0,0 @@
__author__ = 'agostino'

Binary file not shown.

View File

@@ -1,2 +0,0 @@
__author__ = 'agostino'
import logging

View File

@@ -1,847 +0,0 @@
# -*- coding: utf-8 -*-
#
# clx.py - Ethernet/IP Client for Rockwell PLCs
#
#
# Copyright (c) 2014 Agostino Ruscito <ruscito@gmail.com>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
from pycomm_micro.cip.cip_base import *
from pycomm_micro.common import setup_logger
import logging
class Driver(Base):
"""
This Ethernet/IP client is based on Rockwell specification. Please refer to the link below for details.
http://literature.rockwellautomation.com/idc/groups/literature/documents/pm/1756-pm020_-en-p.pdf
The following services have been implemented:
- Read Tag Service (0x4c)
- Read Tag Fragment Service (0x52)
- Write Tag Service (0x4d)
- Write Tag Fragment Service (0x53)
- Multiple Service Packet (0x0a)
The client has been successfully tested with the following PLCs:
- CompactLogix 5330ERM
- CompactLogix 5370
- ControlLogix 5572 and 1756-EN2T Module
"""
def __init__(self, debug=False, filename=None):
if debug:
super(Driver, self).__init__(setup_logger('ab_comm.clx', logging.DEBUG, filename))
else:
super(Driver, self).__init__(setup_logger('ab_comm.clx', logging.INFO, filename))
self._buffer = {}
self._get_template_in_progress = False
self.__version__ = '0.2'
def get_last_tag_read(self):
""" Return the last tag read by a multi request read
:return: A tuple (tag name, value, type)
"""
return self._last_tag_read
def get_last_tag_write(self):
""" Return the last tag write by a multi request write
:return: A tuple (tag name, 'GOOD') if the write was successful otherwise (tag name, 'BAD')
"""
return self._last_tag_write
def _parse_instance_attribute_list(self, start_tag_ptr, status):
""" extract the tags list from the message received
:param start_tag_ptr: The point in the message string where the tag list begin
:param status: The status of the message receives
"""
tags_returned = self._reply[start_tag_ptr:]
tags_returned_length = len(tags_returned)
idx = 0
instance = 0
count = 0
try:
while idx < tags_returned_length:
instance = unpack_dint(tags_returned[idx:idx+4])
idx += 4
tag_length = unpack_uint(tags_returned[idx:idx+2])
idx += 2
tag_name = tags_returned[idx:idx+tag_length]
idx += tag_length
symbol_type = unpack_uint(tags_returned[idx:idx+2])
idx += 2
count += 1
self._tag_list.append({'instance_id': instance,
'tag_name': tag_name,
'symbol_type': symbol_type})
except Exception as e:
raise DataError(e)
if status == SUCCESS:
self._last_instance = -1
elif status == 0x06:
self._last_instance = instance + 1
else:
self._status = (1, 'unknown status during _parse_tag_list')
self._last_instance = -1
def _parse_structure_makeup_attributes(self, start_tag_ptr, status):
""" extract the tags list from the message received
:param start_tag_ptr: The point in the message string where the tag list begin
:param status: The status of the message receives
"""
self._buffer = {}
if status != SUCCESS:
self._buffer['Error'] = status
return
attribute = self._reply[start_tag_ptr:]
idx = 4
try:
if unpack_uint(attribute[idx:idx + 2]) == SUCCESS:
idx += 2
self._buffer['object_definition_size'] = unpack_dint(attribute[idx:idx + 4])
else:
self._buffer['Error'] = 'object_definition Error'
return
idx += 6
if unpack_uint(attribute[idx:idx + 2]) == SUCCESS:
idx += 2
self._buffer['structure_size'] = unpack_dint(attribute[idx:idx + 4])
else:
self._buffer['Error'] = 'structure Error'
return
idx += 6
if unpack_uint(attribute[idx:idx + 2]) == SUCCESS:
idx += 2
self._buffer['member_count'] = unpack_uint(attribute[idx:idx + 2])
else:
self._buffer['Error'] = 'member_count Error'
return
idx += 4
if unpack_uint(attribute[idx:idx + 2]) == SUCCESS:
idx += 2
self._buffer['structure_handle'] = unpack_uint(attribute[idx:idx + 2])
else:
self._buffer['Error'] = 'structure_handle Error'
return
return self._buffer
except Exception as e:
raise DataError(e)
def _parse_template(self, start_tag_ptr, status):
""" extract the tags list from the message received
:param start_tag_ptr: The point in the message string where the tag list begin
:param status: The status of the message receives
"""
tags_returned = self._reply[start_tag_ptr:]
bytes_received = len(tags_returned)
self._buffer += tags_returned
if status == SUCCESS:
self._get_template_in_progress = False
elif status == 0x06:
self._byte_offset += bytes_received
else:
self._status = (1, 'unknown status {0} during _parse_template'.format(status))
self.logger.warning(self._status)
self._last_instance = -1
def _parse_fragment(self, start_ptr, status):
""" parse the fragment returned by a fragment service.
:param start_ptr: Where the fragment start within the replay
:param status: status field used to decide if keep parsing or stop
"""
try:
data_type = unpack_uint(self._reply[start_ptr:start_ptr+2])
fragment_returned = self._reply[start_ptr+2:]
except Exception as e:
raise DataError(e)
fragment_returned_length = len(fragment_returned)
idx = 0
while idx < fragment_returned_length:
try:
typ = I_DATA_TYPE[data_type]
value = UNPACK_DATA_FUNCTION[typ](fragment_returned[idx:idx+DATA_FUNCTION_SIZE[typ]])
idx += DATA_FUNCTION_SIZE[typ]
except Exception as e:
raise DataError(e)
self._tag_list.append((self._last_position, value))
self._last_position += 1
if status == SUCCESS:
self._byte_offset = -1
elif status == 0x06:
self._byte_offset += fragment_returned_length
else:
self._status = (2, 'unknown status during _parse_fragment')
self._byte_offset = -1
def _parse_multiple_request_read(self, tags):
""" parse the message received from a multi request read:
For each tag parsed, the information extracted includes the tag name, the value read and the data type.
Those information are appended to the tag list as tuple
:return: the tag list
"""
offset = 50
position = 50
try:
number_of_service_replies = unpack_uint(self._reply[offset:offset+2])
tag_list = []
for index in range(number_of_service_replies):
position += 2
start = offset + unpack_uint(self._reply[position:position+2])
general_status = unpack_usint(self._reply[start+2:start+3])
if general_status == 0:
data_type = unpack_uint(self._reply[start+4:start+6])
value_begin = start + 6
value_end = value_begin + DATA_FUNCTION_SIZE[I_DATA_TYPE[data_type]]
value = self._reply[value_begin:value_end]
self._last_tag_read = (tags[index], UNPACK_DATA_FUNCTION[I_DATA_TYPE[data_type]](value),
I_DATA_TYPE[data_type])
else:
self._last_tag_read = (tags[index], None, None)
tag_list.append(self._last_tag_read)
return tag_list
except Exception as e:
raise DataError(e)
def _parse_multiple_request_write(self, tags):
""" parse the message received from a multi request writ:
For each tag parsed, the information extracted includes the tag name and the status of the writing.
Those information are appended to the tag list as tuple
:return: the tag list
"""
offset = 50
position = 50
try:
number_of_service_replies = unpack_uint(self._reply[offset:offset+2])
tag_list = []
for index in range(number_of_service_replies):
position += 2
start = offset + unpack_uint(self._reply[position:position+2])
general_status = unpack_usint(self._reply[start+2:start+3])
if general_status == 0:
self._last_tag_write = (tags[index] + ('GOOD',))
else:
self._last_tag_write = (tags[index] + ('BAD',))
tag_list.append(self._last_tag_write)
return tag_list
except Exception as e:
raise DataError(e)
def _check_reply(self):
""" check the replayed message for error
"""
self._more_packets_available = False
try:
if self._reply is None:
self._status = (3, '%s without reply' % REPLAY_INFO[unpack_dint(self._message[:2])])
return False
# Get the type of command
typ = unpack_uint(self._reply[:2])
# Encapsulation status check
if unpack_dint(self._reply[8:12]) != SUCCESS:
self._status = (3, "{0} reply status:{1}".format(REPLAY_INFO[typ],
SERVICE_STATUS[unpack_dint(self._reply[8:12])]))
return False
# Command Specific Status check
if typ == unpack_uint(ENCAPSULATION_COMMAND["send_rr_data"]):
status = unpack_usint(self._reply[42:43])
if status != SUCCESS:
self._status = (3, "send_rr_data reply:{0} - Extend status:{1}".format(
SERVICE_STATUS[status], get_extended_status(self._reply, 42)))
return False
else:
return True
elif typ == unpack_uint(ENCAPSULATION_COMMAND["send_unit_data"]):
status = unpack_usint(self._reply[48:49])
if unpack_usint(self._reply[46:47]) == I_TAG_SERVICES_REPLY["Read Tag Fragmented"]:
self._parse_fragment(50, status)
return True
if unpack_usint(self._reply[46:47]) == I_TAG_SERVICES_REPLY["Get Instance Attributes List"]:
self._parse_instance_attribute_list(50, status)
return True
if unpack_usint(self._reply[46:47]) == I_TAG_SERVICES_REPLY["Get Attributes"]:
self._parse_structure_makeup_attributes(50, status)
return True
if unpack_usint(self._reply[46:47]) == I_TAG_SERVICES_REPLY["Read Template"] and \
self._get_template_in_progress:
self._parse_template(50, status)
return True
if status == 0x06:
self._status = (3, "Insufficient Packet Space")
self._more_packets_available = True
elif status != SUCCESS:
self._status = (3, "send_unit_data reply:{0} - Extend status:{1}".format(
SERVICE_STATUS[status], get_extended_status(self._reply, 48)))
return False
else:
return True
return True
except Exception as e:
raise DataError(e)
def read_tag(self, tag):
""" read tag from a connected plc
Possible combination can be passed to this method:
- ('Counts') a single tag name
- (['ControlWord']) a list with one tag or many
- (['parts', 'ControlWord', 'Counts'])
At the moment there is not a strong validation for the argument passed. The user should verify
the correctness of the format passed.
:return: None is returned in case of error otherwise the tag list is returned
"""
multi_requests = False
if isinstance(tag, list):
multi_requests = True
if not self._target_is_connected:
if not self.forward_open():
self._status = (6, "Target did not connected. read_tag will not be executed.")
self.logger.warning(self._status)
raise Error("Target did not connected. read_tag will not be executed.")
# multi_requests = False
if multi_requests:
rp_list = []
for t in tag:
rp = create_tag_rp(t, multi_requests=True)
if rp is None:
self._status = (6, "Cannot create tag {0} request packet. read_tag will not be executed.".format(tag))
raise DataError("Cannot create tag {0} request packet. read_tag will not be executed.".format(tag))
else:
rp_list.append(chr(TAG_SERVICES_REQUEST['Read Tag']) + rp + pack_uint(1))
message_request = build_multiple_service(rp_list, Base._get_sequence())
else:
rp = create_tag_rp(tag)
if rp is None:
self._status = (6, "Cannot create tag {0} request packet. read_tag will not be executed.".format(tag))
return None
else:
# Creating the Message Request Packet
message_request = [
pack_uint(Base._get_sequence()),
chr(TAG_SERVICES_REQUEST['Read Tag']), # the Request Service
chr(len(rp) / 2), # the Request Path Size length in word
rp, # the request path
pack_uint(1)
]
if self.send_unit_data(
build_common_packet_format(
DATA_ITEM['Connected'],
''.join(message_request),
ADDRESS_ITEM['Connection Based'],
addr_data=self._target_cid,
)) is None:
raise DataError("send_unit_data returned not valid data")
if multi_requests:
return self._parse_multiple_request_read(tag)
else:
# Get the data type
data_type = unpack_uint(self._reply[50:52])
# print I_DATA_TYPE[data_type]
try:
return UNPACK_DATA_FUNCTION[I_DATA_TYPE[data_type]](self._reply[52:]), I_DATA_TYPE[data_type]
except Exception as e:
raise DataError(e)
def read_array(self, tag, counts):
""" read array of atomic data type from a connected plc
At the moment there is not a strong validation for the argument passed. The user should verify
the correctness of the format passed.
:param tag: the name of the tag to read
:param counts: the number of element to read
:return: None is returned in case of error otherwise the tag list is returned
"""
if not self._target_is_connected:
if not self.forward_open():
self._status = (7, "Target did not connected. read_tag will not be executed.")
self.logger.warning(self._status)
raise Error("Target did not connected. read_tag will not be executed.")
self._byte_offset = 0
self._last_position = 0
self._tag_list = []
while self._byte_offset != -1:
rp = create_tag_rp(tag)
if rp is None:
self._status = (7, "Cannot create tag {0} request packet. read_tag will not be executed.".format(tag))
return None
else:
# Creating the Message Request Packet
message_request = [
pack_uint(Base._get_sequence()),
chr(TAG_SERVICES_REQUEST["Read Tag Fragmented"]), # the Request Service
chr(len(rp) / 2), # the Request Path Size length in word
rp, # the request path
pack_uint(counts),
pack_dint(self._byte_offset)
]
if self.send_unit_data(
build_common_packet_format(
DATA_ITEM['Connected'],
''.join(message_request),
ADDRESS_ITEM['Connection Based'],
addr_data=self._target_cid,
)) is None:
raise DataError("send_unit_data returned not valid data")
return self._tag_list
def write_tag(self, tag, value=None, typ=None):
""" write tag/tags from a connected plc
Possible combination can be passed to this method:
- ('tag name', Value, data type) as single parameters or inside a tuple
- ([('tag name', Value, data type), ('tag name2', Value, data type)]) as array of tuples
At the moment there is not a strong validation for the argument passed. The user should verify
the correctness of the format passed.
The type accepted are:
- BOOL
- SINT
- INT'
- DINT
- REAL
- LINT
- BYTE
- WORD
- DWORD
- LWORD
:param tag: tag name, or an array of tuple containing (tag name, value, data type)
:param value: the value to write or none if tag is an array of tuple or a tuple
:param typ: the type of the tag to write or none if tag is an array of tuple or a tuple
:return: None is returned in case of error otherwise the tag list is returned
"""
multi_requests = False
if isinstance(tag, list):
multi_requests = True
if not self._target_is_connected:
if not self.forward_open():
self._status = (8, "Target did not connected. write_tag will not be executed.")
self.logger.warning(self._status)
raise Error("Target did not connected. write_tag will not be executed.")
if multi_requests:
rp_list = []
tag_to_remove = []
idx = 0
for name, value, typ in tag:
# Create the request path to wrap the tag name
rp = create_tag_rp(name, multi_requests=True)
if rp is None:
self._status = (8, "Cannot create tag{0} req. packet. write_tag will not be executed".format(tag))
return None
else:
try: # Trying to add the rp to the request path list
val = PACK_DATA_FUNCTION[typ](value)
rp_list.append(
chr(TAG_SERVICES_REQUEST['Write Tag'])
+ rp
+ pack_uint(S_DATA_TYPE[typ])
+ pack_uint(1)
+ val
)
idx += 1
except (LookupError, struct.error) as e:
self._status = (8, "Tag:{0} type:{1} removed from write list. Error:{2}.".format(name, typ, e))
# The tag in idx position need to be removed from the rp list because has some kind of error
tag_to_remove.append(idx)
# Remove the tags that have not been inserted in the request path list
for position in tag_to_remove:
del tag[position]
# Create the message request
message_request = build_multiple_service(rp_list, Base._get_sequence())
else:
if isinstance(tag, tuple):
name, value, typ = tag
else:
name = tag
rp = create_tag_rp(name)
if rp is None:
self._status = (8, "Cannot create tag {0} request packet. write_tag will not be executed.".format(tag))
self.logger.warning(self._status)
return None
else:
# Creating the Message Request Packet
message_request = [
pack_uint(Base._get_sequence()),
chr(TAG_SERVICES_REQUEST["Write Tag"]), # the Request Service
chr(len(rp) / 2), # the Request Path Size length in word
rp, # the request path
pack_uint(S_DATA_TYPE[typ]), # data type
pack_uint(1), # Add the number of tag to write
PACK_DATA_FUNCTION[typ](value)
]
ret_val = self.send_unit_data(
build_common_packet_format(
DATA_ITEM['Connected'],
''.join(message_request),
ADDRESS_ITEM['Connection Based'],
addr_data=self._target_cid,
)
)
if multi_requests:
return self._parse_multiple_request_write(tag)
else:
if ret_val is None:
raise DataError("send_unit_data returned not valid data")
return ret_val
def write_array(self, tag, data_type, values):
""" write array of atomic data type from a connected plc
At the moment there is not a strong validation for the argument passed. The user should verify
the correctness of the format passed.
:param tag: the name of the tag to read
:param data_type: the type of tag to write
:param values: the array of values to write
"""
if not isinstance(values, list):
self._status = (9, "A list of tags must be passed to write_array.")
self.logger.warning(self._status)
raise DataError("A list of tags must be passed to write_array.")
if not self._target_is_connected:
if not self.forward_open():
self._status = (9, "Target did not connected. write_array will not be executed.")
self.logger.warning(self._status)
raise Error("Target did not connected. write_array will not be executed.")
array_of_values = ""
byte_size = 0
byte_offset = 0
for i, value in enumerate(values):
array_of_values += PACK_DATA_FUNCTION[data_type](value)
byte_size += DATA_FUNCTION_SIZE[data_type]
if byte_size >= 450 or i == len(values)-1:
# create the message and send the fragment
rp = create_tag_rp(tag)
if rp is None:
self._status = (9, "Cannot create tag {0} request packet. \
write_array will not be executed.".format(tag))
return None
else:
# Creating the Message Request Packet
message_request = [
pack_uint(Base._get_sequence()),
chr(TAG_SERVICES_REQUEST["Write Tag Fragmented"]), # the Request Service
chr(len(rp) / 2), # the Request Path Size length in word
rp, # the request path
pack_uint(S_DATA_TYPE[data_type]), # Data type to write
pack_uint(len(values)), # Number of elements to write
pack_dint(byte_offset),
array_of_values # Fragment of elements to write
]
byte_offset += byte_size
if self.send_unit_data(
build_common_packet_format(
DATA_ITEM['Connected'],
''.join(message_request),
ADDRESS_ITEM['Connection Based'],
addr_data=self._target_cid,
)) is None:
raise DataError("send_unit_data returned not valid data")
array_of_values = ""
byte_size = 0
def _get_instance_attribute_list_service(self):
""" Step 1: Finding user-created controller scope tags in a Logix5000 controller
This service returns instance IDs for each created instance of the symbol class, along with a list
of the attribute data associated with the requested attribute
"""
try:
if not self._target_is_connected:
if not self.forward_open():
self._status = (10, "Target did not connected. get_tag_list will not be executed.")
self.logger.warning(self._status)
raise Error("Target did not connected. get_tag_list will not be executed.")
self._last_instance = 0
self._get_template_in_progress = True
while self._last_instance != -1:
# Creating the Message Request Packet
message_request = [
pack_uint(Base._get_sequence()),
chr(TAG_SERVICES_REQUEST['Get Instance Attributes List']), # STEP 1
# the Request Path Size length in word
chr(3),
# Request Path ( 20 6B 25 00 Instance )
CLASS_ID["8-bit"], # Class id = 20 from spec 0x20
CLASS_CODE["Symbol Object"], # Logical segment: Symbolic Object 0x6B
INSTANCE_ID["16-bit"], # Instance Segment: 16 Bit instance 0x25
'\x00',
pack_uint(self._last_instance), # The instance
# Request Data
pack_uint(2), # Number of attributes to retrieve
pack_uint(1), # Attribute 1: Symbol name
pack_uint(2) # Attribute 2: Symbol type
]
if self.send_unit_data(
build_common_packet_format(
DATA_ITEM['Connected'],
''.join(message_request),
ADDRESS_ITEM['Connection Based'],
addr_data=self._target_cid,
)) is None:
raise DataError("send_unit_data returned not valid data")
self._get_template_in_progress = False
except Exception as e:
raise DataError(e)
def _get_structure_makeup(self, instance_id):
"""
get the structure makeup for a specific structure
"""
if not self._target_is_connected:
if not self.forward_open():
self._status = (10, "Target did not connected. get_tag_list will not be executed.")
self.logger.warning(self._status)
raise Error("Target did not connected. get_tag_list will not be executed.")
message_request = [
pack_uint(self._get_sequence()),
chr(TAG_SERVICES_REQUEST['Get Attributes']),
chr(3), # Request Path ( 20 6B 25 00 Instance )
CLASS_ID["8-bit"], # Class id = 20 from spec 0x20
CLASS_CODE["Template Object"], # Logical segment: Template Object 0x6C
INSTANCE_ID["16-bit"], # Instance Segment: 16 Bit instance 0x25
'\x00',
pack_uint(instance_id),
pack_uint(4), # Number of attributes
pack_uint(4), # Template Object Definition Size UDINT
pack_uint(5), # Template Structure Size UDINT
pack_uint(2), # Template Member Count UINT
pack_uint(1) # Structure Handle We can use this to read and write UINT
]
if self.send_unit_data(
build_common_packet_format(DATA_ITEM['Connected'],
''.join(message_request), ADDRESS_ITEM['Connection Based'],
addr_data=self._target_cid,)) is None:
raise DataError("send_unit_data returned not valid data")
return self._buffer
def _read_template(self, instance_id, object_definition_size):
""" get a list of the tags in the plc
"""
if not self._target_is_connected:
if not self.forward_open():
self._status = (10, "Target did not connected. get_tag_list will not be executed.")
self.logger.warning(self._status)
raise Error("Target did not connected. get_tag_list will not be executed.")
self._byte_offset = 0
self._buffer = ""
self._get_template_in_progress = True
try:
while self._get_template_in_progress:
# Creating the Message Request Packet
message_request = [
pack_uint(self._get_sequence()),
chr(TAG_SERVICES_REQUEST['Read Template']),
chr(3), # Request Path ( 20 6B 25 00 Instance )
CLASS_ID["8-bit"], # Class id = 20 from spec 0x20
CLASS_CODE["Template Object"], # Logical segment: Template Object 0x6C
INSTANCE_ID["16-bit"], # Instance Segment: 16 Bit instance 0x25
'\x00',
pack_uint(instance_id),
pack_dint(self._byte_offset), # Offset
pack_uint(((object_definition_size * 4)-23) - self._byte_offset)
]
if not self.send_unit_data(
build_common_packet_format(DATA_ITEM['Connected'], ''.join(message_request),
ADDRESS_ITEM['Connection Based'], addr_data=self._target_cid,)):
raise DataError("send_unit_data returned not valid data")
self._get_template_in_progress = False
return self._buffer
except Exception as e:
raise DataError(e)
def _isolating_user_tag(self):
    """Filter the raw tag list down to user-created tags.

    Rebuilds ``self._tag_list`` keeping only user tags (drops
    module-qualified names containing ':' and system names containing
    '__'), classifying each entry as 'struct' or 'atomic'.

    :raises DataError: wraps any error raised while filtering
    """
    try:
        lst = self._tag_list
        self._tag_list = []
        for tag in lst:
            # program-scoped (":") and system ("__") tags are not user tags
            if tag['tag_name'].find(':') != -1 or tag['tag_name'].find('__') != -1:
                continue
            # bit 12 set flags a reserved/system symbol -- skip it
            if tag['symbol_type'] & 0b0001000000000000:
                continue
            # Bits 13-14 hold the array dimension count. Parentheses are
            # required: ">>" binds tighter than "&", so the original
            # "a & mask >> 13" actually computed "a & (mask >> 13)".
            dimension = (tag['symbol_type'] & 0b0110000000000000) >> 13
            template_instance_id = tag['symbol_type'] & 0b0000111111111111
            if tag['symbol_type'] & 0b1000000000000000:
                # bit 15 set: structured tag (template-based)
                tag_type = 'struct'
                data_type = 'user-created'
                self._tag_list.append({'instance_id': tag['instance_id'],
                                       'template_instance_id': template_instance_id,
                                       'tag_name': tag['tag_name'],
                                       'dim': dimension,
                                       'tag_type': tag_type,
                                       'data_type': data_type,
                                       'template': {},
                                       'udt': {}})
            else:
                # atomic tag: low 12 bits index the elementary data type
                tag_type = 'atomic'
                data_type = I_DATA_TYPE[template_instance_id]
                self._tag_list.append({'instance_id': tag['instance_id'],
                                       'tag_name': tag['tag_name'],
                                       'dim': dimension,
                                       'tag_type': tag_type,
                                       'data_type': data_type})
    except Exception as e:
        raise DataError(e)
def _parse_udt_raw(self, tag):
    """Decode the raw template buffer of a structured tag.

    Populates ``tag['udt']`` with 'name', 'internal_tags' (member names)
    and 'data_type' (list of (array_size, data_type, offset) tuples).

    :param tag: a 'struct' entry from the tag list, with its 'template'
        info already filled in
    :raises DataError: wraps any parsing error
    """
    try:
        buff = self._read_template(tag['template_instance_id'], tag['template']['object_definition_size'])
        member_count = tag['template']['member_count']
        # member and structure names are NUL-separated strings in the buffer
        names = buff.split('\00')
        lst = []
        tag['udt']['name'] = 'Not an user defined structure'
        for name in names:
            if len(name) > 1:
                if name.find(';') != -1:
                    # "<structure name>;<...>" -- text before ';' is the name
                    tag['udt']['name'] = name[:name.find(';')]
                elif name.find('ZZZZZZZZZZ') != -1:
                    # filler/host members -- ignore
                    continue
                elif name.isalpha():
                    lst.append(name)
                else:
                    continue
        tag['udt']['internal_tags'] = lst
        type_list = []
        for i in xrange(member_count):
            # skip member 1
            if i != 0:
                array_size = unpack_uint(buff[:2])
                try:
                    data_type = I_DATA_TYPE[unpack_uint(buff[2:4])]
                except Exception:
                    data_type = "None"
                offset = unpack_dint(buff[4:8])
                type_list.append((array_size, data_type, offset))
            # NOTE(review): each member descriptor is assumed to be 8 bytes
            # and consumed on every iteration (including the skipped first
            # member) -- confirm against the template layout.
            buff = buff[8:]
        tag['udt']['data_type'] = type_list
    except Exception as e:
        raise DataError(e)
def get_tag_list(self):
    """Retrieve and return the list of user tags defined in the PLC.

    Pipeline: fetch the raw instance/attribute list, isolate user tags,
    fetch each structure's template makeup, then parse each UDT template.

    :return: ``self._tag_list``, a list of tag-description dicts
    """
    self._tag_list = []
    # Step 1: raw tag instances from the controller
    self._get_instance_attribute_list_service()
    # Step 2: keep only user-created tags
    self._isolating_user_tag()
    # Step 3: fetch template makeup for every structured tag first...
    for tag in self._tag_list:
        if tag['tag_type'] == 'struct':
            tag['template'] = self._get_structure_makeup(tag['template_instance_id'])
    # ...then parse each template (the unused enumerate index was dropped)
    for tag in self._tag_list:
        if tag['tag_type'] == 'struct':
            self._parse_udt_raw(tag)
    # Step 4
    return self._tag_list

Binary file not shown.

View File

@@ -1,446 +0,0 @@
# -*- coding: utf-8 -*-
#
# clx.py - Ethernet/IP Client for Rockwell PLCs
#
#
# Copyright (c) 2014 Agostino Ruscito <ruscito@gmail.com>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
from pycomm_micro.cip.cip_base import *
from pycomm_micro.common import setup_logger
import re
import logging
import math
def parse_tag(tag):
    """Parse an SLC/PLC-5 data-table address into its addressing fields.

    Tries, in order: timer/counter sub-element addresses (e.g. "T4:0.ACC"),
    float/binary/integer files ("F8:0", "N7:1/2"), I/O files ("I:0.0/1"),
    the status file ("S:1/15") and bit-file bit addresses ("B3/26").

    :param tag: the address string to parse
    :return: on success a 3-tuple ``(True, matched_text, info)`` where
        ``info`` carries the file/element numbers plus the PCCC read/write
        function codes and the address-field count; on failure ``(False, tag)``

    NOTE(review): the sub-element alternation lists ``DN`` twice, and the
    separators use ``(.)`` / unescaped ``.`` which match ANY character --
    confirm before tightening the patterns.
    """
    # Timer/Counter with a named sub-element (ACC, PRE, status bits)
    t = re.search(r"(?P<file_type>[CT])(?P<file_number>\d{1,3})"
                  r"(:)(?P<element_number>\d{1,3})"
                  r"(.)(?P<sub_element>ACC|PRE|EN|DN|TT|CU|CD|DN|OV|UN|UA)", tag, flags=re.IGNORECASE)
    if t:
        if (1 <= int(t.group('file_number')) <= 255) \
                and (0 <= int(t.group('element_number')) <= 255):
            return True, t.group(0), {'file_type': t.group('file_type').upper(),
                                      'file_number': t.group('file_number'),
                                      'element_number': t.group('element_number'),
                                      'sub_element': PCCC_CT[t.group('sub_element').upper()],
                                      'read_func': '\xa2',
                                      'write_func': '\xab',
                                      'address_field': 3}
    # Float / Bit / iNteger files, optional "/bit" sub-element
    t = re.search(r"(?P<file_type>[FBN])(?P<file_number>\d{1,3})"
                  r"(:)(?P<element_number>\d{1,3})"
                  r"(/(?P<sub_element>\d{1,2}))?",
                  tag, flags=re.IGNORECASE)
    if t:
        if t.group('sub_element') is not None:
            if (1 <= int(t.group('file_number')) <= 255) \
                    and (0 <= int(t.group('element_number')) <= 255) \
                    and (0 <= int(t.group('sub_element')) <= 15):
                return True, t.group(0), {'file_type': t.group('file_type').upper(),
                                          'file_number': t.group('file_number'),
                                          'element_number': t.group('element_number'),
                                          'sub_element': t.group('sub_element'),
                                          'read_func': '\xa2',
                                          'write_func': '\xab',
                                          'address_field': 3}
        else:
            if (1 <= int(t.group('file_number')) <= 255) \
                    and (0 <= int(t.group('element_number')) <= 255):
                return True, t.group(0), {'file_type': t.group('file_type').upper(),
                                          'file_number': t.group('file_number'),
                                          'element_number': t.group('element_number'),
                                          'sub_element': t.group('sub_element'),
                                          'read_func': '\xa2',
                                          'write_func': '\xab',
                                          'address_field': 2}
    # Input / Output files, optional "/bit" sub-element
    t = re.search(r"(?P<file_type>[IO])(:)(?P<file_number>\d{1,3})"
                  r"(.)(?P<element_number>\d{1,3})"
                  r"(/(?P<sub_element>\d{1,2}))?", tag, flags=re.IGNORECASE)
    if t:
        if t.group('sub_element') is not None:
            if (0 <= int(t.group('file_number')) <= 255) \
                    and (0 <= int(t.group('element_number')) <= 255) \
                    and (0 <= int(t.group('sub_element')) <= 15):
                return True, t.group(0), {'file_type': t.group('file_type').upper(),
                                          'file_number': t.group('file_number'),
                                          'element_number': t.group('element_number'),
                                          'sub_element': t.group('sub_element'),
                                          'read_func': '\xa2',
                                          'write_func': '\xab',
                                          'address_field': 3}
        else:
            if (0 <= int(t.group('file_number')) <= 255) \
                    and (0 <= int(t.group('element_number')) <= 255):
                return True, t.group(0), {'file_type': t.group('file_type').upper(),
                                          'file_number': t.group('file_number'),
                                          'element_number': t.group('element_number'),
                                          'read_func': '\xa2',
                                          'write_func': '\xab',
                                          'address_field': 2}
    # Status file (file number is fixed to 2)
    t = re.search(r"(?P<file_type>S)"
                  r"(:)(?P<element_number>\d{1,3})"
                  r"(/(?P<sub_element>\d{1,2}))?", tag, flags=re.IGNORECASE)
    if t:
        if t.group('sub_element') is not None:
            if (0 <= int(t.group('element_number')) <= 255) \
                    and (0 <= int(t.group('sub_element')) <= 15):
                return True, t.group(0), {'file_type': t.group('file_type').upper(),
                                          'file_number': '2',
                                          'element_number': t.group('element_number'),
                                          'sub_element': t.group('sub_element'),
                                          'read_func': '\xa2',
                                          'write_func': '\xab',
                                          'address_field': 3}
        else:
            if 0 <= int(t.group('element_number')) <= 255:
                return True, t.group(0), {'file_type': t.group('file_type').upper(),
                                          'file_number': '2',
                                          'element_number': t.group('element_number'),
                                          'read_func': '\xa2',
                                          'write_func': '\xab',
                                          'address_field': 2}
    # Bit file addressed by absolute bit number, e.g. "B3/26"
    t = re.search(r"(?P<file_type>B)(?P<file_number>\d{1,3})"
                  r"(/)(?P<element_number>\d{1,4})",
                  tag, flags=re.IGNORECASE)
    if t:
        if (1 <= int(t.group('file_number')) <= 255) \
                and (0 <= int(t.group('element_number')) <= 4095):
            # split the absolute bit number into a 16-bit word + bit offset
            bit_position = int(t.group('element_number'))
            element_number = bit_position / 16
            sub_element = bit_position - (element_number * 16)
            return True, t.group(0), {'file_type': t.group('file_type').upper(),
                                      'file_number': t.group('file_number'),
                                      'element_number': element_number,
                                      'sub_element': sub_element,
                                      'read_func': '\xa2',
                                      'write_func': '\xab',
                                      'address_field': 3}
    return False, tag
class Driver(Base):
    """
    SLC/PLC_5 Implementation
    """
    def __init__(self, debug=False, filename=None):
        """Create an SLC/PLC-5 driver with a DEBUG or INFO level logger."""
        if debug:
            super(Driver, self).__init__(setup_logger('ab_comm.slc', logging.DEBUG, filename))
        else:
            super(Driver, self).__init__(setup_logger('ab_comm.slc', logging.INFO, filename))
        self.__version__ = '0.1'
        self._last_sequence = 0

    def _check_reply(self):
        """Check the last reply message for errors.

        :return: True if the reply is valid, False otherwise (``self._status``
            carries the error details)
        :raises DataError: wraps any error raised while inspecting the reply
        """
        self._more_packets_available = False
        try:
            if self._reply is None:
                self._status = (3, '%s without reply' % REPLAY_INFO[unpack_dint(self._message[:2])])
                return False
            # Get the type of command
            typ = unpack_uint(self._reply[:2])
            # Encapsulation status check
            if unpack_dint(self._reply[8:12]) != SUCCESS:
                self._status = (3, "{0} reply status:{1}".format(REPLAY_INFO[typ],
                                SERVICE_STATUS[unpack_dint(self._reply[8:12])]))
                return False
            # Command Specific Status check
            if typ == unpack_uint(ENCAPSULATION_COMMAND["send_rr_data"]):
                status = unpack_usint(self._reply[42:43])
                if status != SUCCESS:
                    self._status = (3, "send_rr_data reply:{0} - Extend status:{1}".format(
                        SERVICE_STATUS[status], get_extended_status(self._reply, 42)))
                    return False
                else:
                    return True
            elif typ == unpack_uint(ENCAPSULATION_COMMAND["send_unit_data"]):
                status = unpack_usint(self._reply[48:49])
                # fragmented reads and tag-list replies are parsed as they arrive
                if unpack_usint(self._reply[46:47]) == I_TAG_SERVICES_REPLY["Read Tag Fragmented"]:
                    self._parse_fragment(50, status)
                    return True
                if unpack_usint(self._reply[46:47]) == I_TAG_SERVICES_REPLY["Get Instance Attributes List"]:
                    self._parse_tag_list(50, status)
                    return True
                if status == 0x06:
                    # partial transfer: more packets must be requested
                    self._status = (3, "Insufficient Packet Space")
                    self._more_packets_available = True
                elif status != SUCCESS:
                    self._status = (3, "send_unit_data reply:{0} - Extend status:{1}".format(
                        SERVICE_STATUS[status], get_extended_status(self._reply, 48)))
                    return False
                else:
                    return True
            return True
        except Exception as e:
            raise DataError(e)

    def read_tag(self, tag, n=1):
        """ read tag from a connected plc
        Possible combination can be passed to this method:
                print c.read_tag('F8:0', 3)   return a list of 3 registers starting from F8:0
                print c.read_tag('F8:0')   return one value
        It is possible to read status bit
        :return: None is returned in case of error
        """
        res = parse_tag(tag)
        if not res[0]:
            self._status = (1000, "Error parsing the tag passed to read_tag({0},{1})".format(tag, n))
            self.logger.warning(self._status)
            raise DataError("Error parsing the tag passed to read_tag({0},{1})".format(tag, n))

        bit_read = False
        bit_position = 0
        sub_element = 0
        # address_field == 3 means a bit/sub-element is addressed
        if res[2]['address_field'] == 3:
            bit_read = True
            bit_position = int(res[2]['sub_element'])

        if not self._target_is_connected:
            if not self.forward_open():
                self._status = (5, "Target did not connected. read_tag will not be executed.")
                self.logger.warning(self._status)
                raise Error("Target did not connected. read_tag will not be executed.")

        data_size = PCCC_DATA_SIZE[res[2]['file_type']]

        # Creating the Message Request Packet
        self._last_sequence = pack_uint(Base._get_sequence())
        message_request = [
            self._last_sequence,
            '\x4b',                    # service: Execute PCCC -- TODO confirm
            '\x02',                    # request path size (words)
            CLASS_ID["8-bit"],
            PATH["PCCC"],
            '\x07',                    # requestor id length
            self.attribs['vid'],
            self.attribs['vsn'],
            '\x0f',                    # PCCC command
            '\x00',                    # STS
            self._last_sequence[1],    # transaction id (byte-swapped sequence)
            self._last_sequence[0],
            res[2]['read_func'],
            pack_usint(data_size * n),
            pack_usint(int(res[2]['file_number'])),
            PCCC_DATA_TYPE[res[2]['file_type']],
            pack_usint(int(res[2]['element_number'])),
            pack_usint(sub_element)
        ]
        self.logger.debug("SLC read_tag({0},{1})".format(tag, n))
        if self.send_unit_data(
                build_common_packet_format(
                    DATA_ITEM['Connected'],
                    ''.join(message_request),
                    ADDRESS_ITEM['Connection Based'],
                    addr_data=self._target_cid,)):
            sts = int(unpack_usint(self._reply[58]))
            try:
                if sts != 0:
                    sts_txt = PCCC_ERROR_CODE[sts]
                    self._status = (1000, "Error({0}) returned from read_tag({1},{2})".format(sts_txt, tag, n))
                    self.logger.warning(self._status)
                    raise DataError("Error({0}) returned from read_tag({1},{2})".format(sts_txt, tag, n))

                # NOTE(review): 61 appears to be the offset of the first data
                # byte in the reply -- confirm against the PCCC reply layout.
                new_value = 61
                if bit_read:
                    if res[2]['file_type'] == 'T' or res[2]['file_type'] == 'C':
                        # PRE/ACC live at fixed word offsets inside the element
                        if bit_position == PCCC_CT['PRE']:
                            return UNPACK_PCCC_DATA_FUNCTION[res[2]['file_type']](
                                self._reply[new_value+2:new_value+2+data_size])
                        elif bit_position == PCCC_CT['ACC']:
                            return UNPACK_PCCC_DATA_FUNCTION[res[2]['file_type']](
                                self._reply[new_value+4:new_value+4+data_size])
                    tag_value = UNPACK_PCCC_DATA_FUNCTION[res[2]['file_type']](
                        self._reply[new_value:new_value+data_size])
                    return get_bit(tag_value, bit_position)
                else:
                    # whole-element read: collect every complete value returned
                    values_list = []
                    while len(self._reply[new_value:]) >= data_size:
                        values_list.append(
                            UNPACK_PCCC_DATA_FUNCTION[res[2]['file_type']](self._reply[new_value:new_value+data_size])
                        )
                        new_value = new_value+data_size
                    if len(values_list) > 1:
                        return values_list
                    else:
                        return values_list[0]
            except Exception as e:
                self._status = (1000, "Error({0}) parsing the data returned from read_tag({1},{2})".format(e, tag, n))
                self.logger.warning(self._status)
                raise DataError("Error({0}) parsing the data returned from read_tag({1},{2})".format(e, tag, n))
        else:
            raise DataError("send_unit_data returned not valid data")

    def write_tag(self, tag, value):
        """ write tag from a connected plc
        Possible combination can be passed to this method:
                c.write_tag('N7:0', [-30, 32767, -32767])
                c.write_tag('N7:0', 21)
        It is not possible to write status bit
        :return: None is returned in case of error
        """
        res = parse_tag(tag)
        if not res[0]:
            # (messages previously said "read_tag" here -- fixed)
            self._status = (1000, "Error parsing the tag passed to write_tag({0},{1})".format(tag, value))
            self.logger.warning(self._status)
            raise DataError("Error parsing the tag passed to write_tag({0},{1})".format(tag, value))

        # A list cannot be written to a bit/sub-element address.
        # (the original repeated this identical check twice; once suffices)
        if isinstance(value, list) and res[2]['address_field'] == 3:
            self._status = (1000, "Function's parameters error. write_tag({0},{1})".format(tag, value))
            self.logger.warning(self._status)
            raise DataError("Function's parameters error. write_tag({0},{1})".format(tag, value))

        bit_field = False
        bit_position = 0
        sub_element = 0
        if res[2]['address_field'] == 3:
            bit_field = True
            bit_position = int(res[2]['sub_element'])
            values_list = ''
        else:
            values_list = '\xff\xff'

        multi_requests = isinstance(value, list)

        if not self._target_is_connected:
            if not self.forward_open():
                self._status = (1000, "Target did not connected. write_tag will not be executed.")
                self.logger.warning(self._status)
                raise Error("Target did not connected. write_tag will not be executed.")

        try:
            n = 0
            if multi_requests:
                data_size = PCCC_DATA_SIZE[res[2]['file_type']]
                for v in value:
                    values_list += PACK_PCCC_DATA_FUNCTION[res[2]['file_type']](v)
                    n += 1
            else:
                n = 1
                if bit_field:
                    data_size = 2
                    if (res[2]['file_type'] == 'T' or res[2]['file_type'] == 'C') \
                            and (bit_position == PCCC_CT['PRE'] or bit_position == PCCC_CT['ACC']):
                        sub_element = bit_position
                        values_list = '\xff\xff' + PACK_PCCC_DATA_FUNCTION[res[2]['file_type']](value)
                    else:
                        sub_element = 0
                        # mask/value pair for the bit write; integer shifts
                        # replace math.pow, which produced a float that
                        # struct.pack only accepted with a warning
                        if value > 0:
                            values_list = pack_uint(1 << bit_position) + pack_uint(1 << bit_position)
                        else:
                            values_list = pack_uint(1 << bit_position) + pack_uint(0)
                else:
                    values_list += PACK_PCCC_DATA_FUNCTION[res[2]['file_type']](value)
                    data_size = PCCC_DATA_SIZE[res[2]['file_type']]
        except Exception as e:
            self._status = (1000, "Error({0}) packing the values to write to the "
                                  "SLC write_tag({1},{2})".format(e, tag, value))
            self.logger.warning(self._status)
            raise DataError("Error({0}) packing the values to write to the "
                            "SLC write_tag({1},{2})".format(e, tag, value))

        data_to_write = values_list

        # Creating the Message Request Packet
        self._last_sequence = pack_uint(Base._get_sequence())
        message_request = [
            self._last_sequence,
            '\x4b',
            '\x02',
            CLASS_ID["8-bit"],
            PATH["PCCC"],
            '\x07',
            self.attribs['vid'],
            self.attribs['vsn'],
            '\x0f',
            '\x00',
            self._last_sequence[1],
            self._last_sequence[0],
            res[2]['write_func'],
            pack_usint(data_size * n),
            pack_usint(int(res[2]['file_number'])),
            PCCC_DATA_TYPE[res[2]['file_type']],
            pack_usint(int(res[2]['element_number'])),
            pack_usint(sub_element)
        ]
        self.logger.debug("SLC write_tag({0},{1})".format(tag, value))
        if self.send_unit_data(
                build_common_packet_format(
                    DATA_ITEM['Connected'],
                    ''.join(message_request) + data_to_write,
                    ADDRESS_ITEM['Connection Based'],
                    addr_data=self._target_cid,)):
            sts = int(unpack_usint(self._reply[58]))
            try:
                if sts != 0:
                    sts_txt = PCCC_ERROR_CODE[sts]
                    self._status = (1000, "Error({0}) returned from SLC write_tag({1},{2})".format(sts_txt, tag, value))
                    self.logger.warning(self._status)
                    raise DataError("Error({0}) returned from SLC write_tag({1},{2})".format(sts_txt, tag, value))
                return True
            except Exception as e:
                self._status = (1000, "Error({0}) parsing the data returned from "
                                      "SLC write_tag({1},{2})".format(e, tag, value))
                self.logger.warning(self._status)
                raise DataError("Error({0}) parsing the data returned from "
                                "SLC write_tag({1},{2})".format(e, tag, value))
        else:
            raise DataError("send_unit_data returned not valid data")

Binary file not shown.

View File

@@ -1 +0,0 @@
__author__ = 'agostino'

Binary file not shown.

View File

@@ -1,827 +0,0 @@
# -*- coding: utf-8 -*-
#
# cip_base.py - A set of classes methods and structures used to implement Ethernet/IP
#
#
# Copyright (c) 2014 Agostino Ruscito <ruscito@gmail.com>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
import struct
import socket
from os import getpid
from pycomm_micro.cip.cip_const import *
from pycomm_micro.common import PycommError
class CommError(PycommError):
    """Raised for transport-level failures (socket timeouts, broken connections)."""
    pass
class DataError(PycommError):
    """Raised when a message cannot be built or a reply cannot be parsed."""
    pass
def pack_sint(n):
    """Pack a signed 8-bit integer into a single byte."""
    packed = struct.pack('b', n)
    return packed
def pack_usint(n):
    """Pack an unsigned 8-bit integer into a single byte."""
    packed = struct.pack('B', n)
    return packed
def pack_int(n):
    """Pack a signed 16-bit integer into 2 bytes, little endian."""
    packed = struct.pack('<h', n)
    return packed
def pack_uint(n):
    """Pack an unsigned 16-bit integer into 2 bytes, little endian."""
    packed = struct.pack('<H', n)
    return packed
def pack_dint(n):
    """Pack a signed 32-bit integer into 4 bytes, little endian."""
    packed = struct.pack('<i', n)
    return packed
def pack_real(r):
    """Pack a 32-bit float into 4 bytes, little endian.

    (The original docstring incorrectly said "unpack ... to int".)
    """
    packed = struct.pack('<f', r)
    return packed
def pack_lint(l):
    """Pack a signed 64-bit integer into 8 bytes, little endian.

    Bug fix: the original called ``struct.unpack('<q', l)``, which raises a
    TypeError/struct.error for any integer argument -- packing was intended.
    """
    return struct.pack('<q', l)
def unpack_bool(st):
    """Unpack the first byte of *st* as a boolean flag: 1 if non-zero else 0.

    Uses ``st[0:1]`` (a one-byte slice) instead of ``st[0]``: identical on
    Python 2 strings, but also correct for ``bytes`` where single indexing
    yields an ``int`` that ``struct.unpack`` rejects.
    """
    if int(struct.unpack('B', st[0:1])[0]) == 0:
        return 0
    return 1
def unpack_sint(st):
    """Unpack the first byte of *st* as a signed 8-bit integer.

    Uses a one-byte slice rather than single indexing so it also works on
    ``bytes`` objects (where ``st[0]`` is an ``int``).
    """
    return int(struct.unpack('b', st[0:1])[0])
def unpack_usint(st):
    """Unpack the first byte of *st* as an unsigned 8-bit integer.

    Uses a one-byte slice rather than single indexing so it also works on
    ``bytes`` objects (where ``st[0]`` is an ``int``).
    """
    return int(struct.unpack('B', st[0:1])[0])
def unpack_int(st):
    """Unpack 2 little-endian bytes as a signed 16-bit integer."""
    (value,) = struct.unpack('<h', st[0:2])
    return int(value)
def unpack_uint(st):
    """Unpack 2 little-endian bytes as an unsigned 16-bit integer."""
    (value,) = struct.unpack('<H', st[0:2])
    return int(value)
def unpack_dint(st):
    """Unpack 4 little-endian bytes as a signed 32-bit integer."""
    (value,) = struct.unpack('<i', st[0:4])
    return int(value)
def unpack_real(st):
    """Unpack 4 little-endian bytes as a 32-bit float."""
    (value,) = struct.unpack('<f', st[0:4])
    return float(value)
def unpack_lreal(st):
    """Unpack 8 little-endian bytes as a 64-bit float (LREAL).

    Bug fix: the original used the 32-bit format ``'<f'`` on an 8-byte
    slice, which makes ``struct.unpack`` raise a struct.error for every
    call ("unpack requires ... 4 bytes"); ``'<d'`` is the 8-byte double.
    """
    return float(struct.unpack('<d', st[0:8])[0])
def unpack_lint(st):
    """Unpack 8 little-endian bytes as a signed 64-bit integer.

    (The original docstring incorrectly said 4 bytes.)
    """
    (value,) = struct.unpack('<q', st[0:8])
    return int(value)
def get_bit(value, idx):
    """Return True when bit *idx* of *value* is set, else False."""
    mask = 1 << idx
    return (value & mask) != 0
# CIP elementary-type name -> packing function
PACK_DATA_FUNCTION = {
    'BOOL': pack_sint,
    'SINT': pack_sint,    # Signed 8-bit integer
    'INT': pack_int,      # Signed 16-bit integer
    'UINT': pack_uint,    # Unsigned 16-bit integer
    'USINT': pack_usint,  # Unsigned 8-bit integer
    'DINT': pack_dint,    # Signed 32-bit integer
    'REAL': pack_real,    # 32-bit floating point
    'LREAL': pack_real,   # NOTE(review): LREAL is 64-bit but is packed with the
                          # 32-bit pack_real -- precision loss; confirm intent
    'LINT': pack_lint,
    'BYTE': pack_sint,    # byte string 8-bits
    'WORD': pack_uint,    # byte string 16-bits
    'DWORD': pack_dint,   # byte string 32-bits
    'LWORD': pack_lint    # byte string 64-bits
}

# CIP elementary-type name -> unpacking function
UNPACK_DATA_FUNCTION = {
    'BOOL': unpack_bool,
    'SINT': unpack_sint,    # Signed 8-bit integer
    'INT': unpack_int,      # Signed 16-bit integer
    'UINT': unpack_uint,    # Unsigned 16-bit
    'USINT': unpack_usint,  # Unsigned 8-bit integer
    'DINT': unpack_dint,    # Signed 32-bit integer
    'UDINT': unpack_dint,   # NOTE(review): unsigned 32-bit unpacked with the
                            # signed helper -- values above 2**31-1 come back
                            # negative; confirm intent
    'REAL': unpack_real,    # 32-bit floating point
    'LREAL': unpack_lreal,  # 64-bit floating point
    'LINT': unpack_lint,
    'BYTE': unpack_sint,    # byte string 8-bits
    'WORD': unpack_uint,    # byte string 16-bits
    'DWORD': unpack_dint,   # byte string 32-bits
    'LWORD': unpack_lint    # byte string 64-bits
}

# CIP elementary-type name -> size in bytes.
# NOTE(review): USINT, UDINT and LREAL appear in UNPACK_DATA_FUNCTION but have
# no entry here -- lookups for them raise LookupError (callers catch it).
DATA_FUNCTION_SIZE = {
    'BOOL': 1,
    'SINT': 1,    # Signed 8-bit integer
    'INT': 2,     # Signed 16-bit integer
    'UINT': 2,    # Unsigned 16-bit integer
    'DINT': 4,    # Signed 32-bit integer
    'REAL': 4,    # 32-bit floating point
    'LINT': 8,
    'BYTE': 1,    # byte string 8-bits
    'WORD': 2,    # byte string 16-bits
    'DWORD': 4,   # byte string 32-bits
    'LWORD': 8    # byte string 64-bits
}

# PCCC file-type letter -> unpacking function
UNPACK_PCCC_DATA_FUNCTION = {
    'N': unpack_int,
    'B': unpack_int,
    'T': unpack_int,
    'C': unpack_int,
    'S': unpack_int,
    'F': unpack_real,
    'A': unpack_sint,
    'R': unpack_dint,
    'O': unpack_int,
    'I': unpack_int
}

# PCCC file-type letter -> packing function
PACK_PCCC_DATA_FUNCTION = {
    'N': pack_int,
    'B': pack_int,
    'T': pack_int,
    'C': pack_int,
    'S': pack_int,
    'F': pack_real,
    'A': pack_sint,
    'R': pack_dint,
    'O': pack_int,
    'I': pack_int
}
def print_bytes_line(msg):
    """Render *msg* as one continuous lowercase hex string."""
    return ''.join("{:0>2x}".format(ord(ch)) for ch in msg)
def print_bytes_msg(msg, info=''):
    """Render *msg* as a hex dump, 10 bytes per line, prefixed by *info*.

    Each output line starts with the zero-padded decimal offset of its
    first byte, e.g. ``(0010) 61 62 ...``.
    """
    out = info
    for idx, ch in enumerate(msg):
        if idx % 10 == 0:
            # new dump line labelled with the running byte offset
            out += "\n({:0>4d}) ".format(idx)
        out += "{:0>2x} ".format(ord(ch))
    return out
def get_extended_status(msg, start):
    """Extract and translate the extended status from an error reply.

    :param msg: the full reply message
    :param start: offset of the General Status byte (42 for send_rr_data
        replies, 48 for send_unit_data replies -- see layout below)
    :return: a human-readable description of the extended status
    """
    status = unpack_usint(msg[start:start+1])
    # send_rr_data
    # 42 General Status
    # 43 Size of additional status
    # 44..n additional status
    # send_unit_data
    # 48 General Status
    # 49 Size of additional status
    # 50..n additional status
    # size is reported in 16-bit words; *2 converts to bytes
    extended_status_size = (unpack_usint(msg[start+1:start+2]))*2
    extended_status = 0
    if extended_status_size != 0:
        # There is an additional status
        # NOTE(review): after the *2 conversion the size is always even, so
        # the "== 1" branch below is unreachable -- confirm intent.
        if extended_status_size == 1:
            extended_status = unpack_usint(msg[start+2:start+3])
        elif extended_status_size == 2:
            extended_status = unpack_uint(msg[start+2:start+4])
        elif extended_status_size == 4:
            extended_status = unpack_dint(msg[start+2:start+6])
        else:
            return 'Extended Status Size Unknown'
    try:
        return '{0}'.format(EXTEND_CODES[status][extended_status])
    except LookupError:
        return "Extended Status info not present"
def create_tag_rp(tag, multi_requests=False):
    """ Create tag Request Packet

    Builds the CIP request path for *tag* (dot-separated symbolic segments,
    each optionally followed by one or more array indices in brackets).

    :param tag: tag name, e.g. ``"MyUDT.Member[3,2]"``
    :param multi_requests: when True, prefix the path with its size in words
    :return: the encoded request path, or None if an index is out of range
    """
    tags = tag.split('.')
    rp = []
    index = []
    for tag in tags:
        add_index = False
        # Check if is an array tag
        if tag.find('[') != -1:
            # Remove the last square bracket
            tag = tag[:len(tag)-1]
            # Isolate the value inside bracket
            inside_value = tag[tag.find('[')+1:]
            # Now split the inside value in case part of multidimensional array
            index = inside_value.split(',')
            # Flag the existence of one or more index
            add_index = True
            # Get only the tag part
            tag = tag[:tag.find('[')]
        tag_length = len(tag)

        # Create the request path
        rp.append(EXTENDED_SYMBOL)  # ANSI Ext. symbolic segment
        rp.append(chr(tag_length))  # Length of the tag
        # Add the tag to the Request path
        for char in tag:
            rp.append(char)
        # Add pad byte because total length of Request path must be word-aligned
        if tag_length % 2:
            rp.append(PADDING_BYTE)
        # Add any index
        if add_index:
            for idx in index:
                val = int(idx)
                if val <= 0xff:
                    rp.append(ELEMENT_ID["8-bit"])
                    rp.append(pack_usint(val))
                elif val <= 0xffff:
                    rp.append(ELEMENT_ID["16-bit"]+PADDING_BYTE)
                    rp.append(pack_uint(val))
                elif val <= 0xffffffff:
                    # Bug fix: the bound was 0xfffffffff (nine f's), so 33-36
                    # bit indices fell through to pack_dint and crashed
                    # instead of returning None.
                    # NOTE(review): pack_dint is signed, so indices above
                    # 0x7fffffff still raise struct.error -- confirm intent.
                    rp.append(ELEMENT_ID["32-bit"]+PADDING_BYTE)
                    rp.append(pack_dint(val))
                else:
                    # Cannot create a valid request packet
                    return None

    # At this point the Request Path is completed
    if multi_requests:
        # floor division keeps this an int on both Python 2 and 3
        request_path = chr(len(rp) // 2) + ''.join(rp)
    else:
        request_path = ''.join(rp)
    return request_path
def build_common_packet_format(message_type, message, addr_type, addr_data=None, timeout=10):
    """Assemble the common packet format wrapper for a CIP message.

    Check Volume 2 (page 2.22) of the CIP specification for reference.

    :param message_type: pre-packed Data Item type id
    :param message: the encoded CIP payload
    :param addr_type: pre-packed Address Item type id
    :param addr_data: optional address item payload (e.g. connection id)
    :param timeout: encapsulated-message timeout value
    :return: the complete common-packet-format string
    """
    parts = [
        pack_dint(0),        # Interface Handle: shall be 0 for CIP
        pack_uint(timeout),  # timeout
        pack_uint(2),        # Item count: address item plus data item
        addr_type,           # Address Item Type ID
    ]
    if addr_data is None:
        parts.append(pack_uint(0))              # Null address item length
    else:
        parts.append(pack_uint(len(addr_data))) # Address Item Length
        parts.append(addr_data)
    parts.append(message_type)                  # Data Type ID
    parts.append(pack_uint(len(message)))       # Data Item Length
    parts.append(message)
    return ''.join(parts)
def build_multiple_service(rp_list, sequence=None):
    """Build a Multiple Service Packet request embedding the given requests.

    :param rp_list: list of already-encoded embedded request packets
    :param sequence: optional sequence number prepended for connected messages
    :return: the request as a list of string fragments (join before sending)
    """
    mr = []
    if sequence is not None:
        mr.append(pack_uint(sequence))
    mr.append(chr(TAG_SERVICES_REQUEST["Multiple Service Packet"]))  # the Request Service
    mr.append(pack_usint(2))                 # the Request Path Size length in word
    mr.append(CLASS_ID["8-bit"])
    mr.append(CLASS_CODE["Message Router"])
    mr.append(INSTANCE_ID["8-bit"])
    mr.append(pack_usint(1))                 # Instance 1
    mr.append(pack_uint(len(rp_list)))       # Number of services contained in the request
    # First service starts after the offset table (2 bytes per entry) plus
    # the 2-byte service count.
    offset = (len(rp_list) * 2) + 2
    # The original emitted each starting offset through an if/else whose two
    # branches were identical; a single append per request suffices.
    for rp in rp_list:
        mr.append(pack_uint(offset))
        offset += len(rp)
    for rp in rp_list:
        mr.append(rp)
    return mr
def parse_multiple_request(message, tags, typ):
    """ parse_multi_request
    This function should be used to parse the message replied to a multi
    request service wrapped around the send_unit_data message.
    :param message: the full message returned from the PLC
    :param tags: The list of tags to be read
    :param typ: to specify if multi request service READ or WRITE
    :return: a list of tuple in the format [ (tag name, value, data type), ( tag name, value, data type) ].
        In case of error the tuple will be (tag name, None, None)
    """
    offset = 50    # start of the multiple-service reply payload
    position = 50
    number_of_service_replies = unpack_uint(message[offset:offset+2])
    tag_list = []
    for index in range(number_of_service_replies):
        position += 2
        # each offset-table entry points at one embedded service reply
        start = offset + unpack_uint(message[position:position+2])
        general_status = unpack_usint(message[start+2:start+3])
        if general_status == 0:
            if typ == "READ":
                data_type = unpack_uint(message[start+4:start+6])
                try:
                    value_begin = start + 6
                    value_end = value_begin + DATA_FUNCTION_SIZE[I_DATA_TYPE[data_type]]
                    value = message[value_begin:value_end]
                    tag_list.append((tags[index],
                                     UNPACK_DATA_FUNCTION[I_DATA_TYPE[data_type]](value),
                                     I_DATA_TYPE[data_type]))
                except LookupError:
                    # unknown data type code, or no size entry for it
                    tag_list.append((tags[index], None, None))
            else:
                # NOTE(review): assumes tags[index] is a tuple for WRITE
                # replies (tuple concatenation) -- verify against callers.
                tag_list.append((tags[index] + ('GOOD',)))
        else:
            if typ == "READ":
                tag_list.append((tags[index], None, None))
            else:
                tag_list.append((tags[index] + ('BAD',)))
    return tag_list
class Socket:
    """Thin TCP socket wrapper with whole-message send/receive semantics."""

    def __init__(self, timeout=5.0):
        # TCP stream socket with keep-alive and a default timeout
        self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.sock.settimeout(timeout)
        self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)

    def connect(self, host, port):
        """Connect to (host, port); raises CommError on socket timeout."""
        try:
            self.sock.connect((host, port))
        except socket.timeout:
            raise CommError("Socket timeout during connection.")

    def send(self, msg, timeout=0):
        """Send all of *msg*, looping until every byte is written.

        :param timeout: when non-zero, replaces the socket timeout
        :return: total number of bytes sent
        :raises CommError: if the connection breaks mid-send
        """
        if timeout != 0:
            self.sock.settimeout(timeout)
        total_sent = 0
        while total_sent < len(msg):
            try:
                sent = self.sock.send(msg[total_sent:])
                if sent == 0:
                    raise CommError("socket connection broken.")
                total_sent += sent
            except socket.error:
                raise CommError("socket connection broken.")
        return total_sent

    def receive(self, timeout=0):
        """Receive one full encapsulated message and return it.

        The encapsulation Length field at bytes 2-4 of the first chunk gives
        the size of the data portion; the total expected length is then
        HEADER_SIZE + data size.

        :param timeout: when non-zero, replaces the socket timeout
        :raises CommError: on any socket error
        """
        if timeout != 0:
            self.sock.settimeout(timeout)
        # NOTE(review): 28 is presumably enough to cover the header before
        # the real length is known -- confirm against HEADER_SIZE.
        msg_len = 28
        chunks = []
        bytes_recd = 0
        one_shot = True
        while bytes_recd < msg_len:
            try:
                chunk = self.sock.recv(min(msg_len - bytes_recd, 2048))
                if chunk == '':
                    raise CommError("socket connection broken.")
                if one_shot:
                    data_size = int(struct.unpack('<H', chunk[2:4])[0])  # Length
                    msg_len = HEADER_SIZE + data_size
                    one_shot = False
                chunks.append(chunk)
                bytes_recd += len(chunk)
            except socket.error as e:
                raise CommError(e)
        return ''.join(chunks)

    def close(self):
        """Close the underlying socket."""
        self.sock.close()
def parse_symbol_type(symbol):
    """ parse_symbol_type
    Placeholder meant to decode a tag's symbol word per the Rockwell spec.
    Not implemented yet: always returns None.
    :param symbol: the symbol associated to a tag
    :return: None (unimplemented)
    """
    return None
class Base(object):
_sequence = 0
def __init__(self, logging):
    """Shared state for all Ethernet/IP driver implementations.

    :param logging: an already-configured logger instance (the parameter
        name shadows the ``logging`` module inside this method)
    """
    # seed the class-wide sequence counter from the PID on first instance
    if Base._sequence == 0:
        Base._sequence = getpid()
    else:
        Base._sequence = Base._get_sequence()
    self.logger = logging
    self.__version__ = '0.1'
    self.__sock = None                        # transport socket
    self._session = 0                         # encapsulation session handle
    self._connection_opened = False
    self._reply = None                        # last raw reply received
    self._message = None                      # last raw message sent
    self._target_cid = None                   # connection id from the target
    self._target_is_connected = False
    self._tag_list = []
    self._buffer = {}
    self._device_description = "Device Unknown"
    self._last_instance = 0
    self._byte_offset = 0
    self._last_position = 0
    self._more_packets_available = False
    self._last_tag_read = ()
    self._last_tag_write = ()
    self._status = (0, "")                    # (error group, error message)
    # default connection attributes; overridable via dict-style access
    self.attribs = {'context': '_pycomm_', 'protocol version': 1, 'rpi': 5000, 'port': 0xAF12, 'timeout': 10,
                    'backplane': 0, 'cpu slot': 0, 'option': 0, 'cid': '\x27\x04\x19\x71', 'csn': '\x27\x04',
                    'vid': '\x09\x10', 'vsn': '\x09\x10\x19\x71', 'name': 'Base', 'ip address': None}
def __len__(self):
    """Number of configuration attributes (``len()`` of the attribs dict)."""
    return len(self.attribs)
def __getitem__(self, key):
    """Dict-style read access to the driver configuration attributes."""
    return self.attribs[key]
def __setitem__(self, key, value):
    """Dict-style write access to the driver configuration attributes."""
    self.attribs[key] = value
def __delitem__(self, key):
    """Remove *key* from the configuration attributes; missing keys are ignored."""
    self.attribs.pop(key, None)
def __iter__(self):
    """Iterate over the configuration attribute names."""
    return iter(self.attribs)
def __contains__(self, item):
    """True when *item* is a configuration attribute name."""
    return item in self.attribs
def _check_reply(self):
    """Validate the last reply; concrete drivers must override this.

    Bug fix: the original raised ``Socket.ImplementationError``, an
    attribute that does not exist on ``Socket``, so calling it produced an
    ``AttributeError`` instead of the intended not-implemented signal.
    """
    raise NotImplementedError("The method has not been implemented")
@staticmethod
def _get_sequence():
    """ Increase and return the sequence used with connected messages

    Wraps back to the process id once the 16-bit range is exhausted.
    :return: The New sequence
    """
    Base._sequence = Base._sequence + 1 if Base._sequence < 65535 else getpid()
    return Base._sequence
def nop(self):
    """ No reply command
    A NOP provides a way for either an originator or target to determine if the TCP connection is still open.
    """
    # NOP carries no data portion and expects no reply (no _receive() here)
    self._message = self.build_header(ENCAPSULATION_COMMAND['nop'], 0)
    self._send()
def __repr__(self):
    """Represent the driver by its discovered device description."""
    return self._device_description
def description(self):
    """Return the device description discovered via list_identity."""
    return self._device_description
def list_identity(self):
    """ ListIdentity command to locate and identify potential target
    :return: True if the reply contains the device description
    """
    self._message = self.build_header(ENCAPSULATION_COMMAND['list_identity'], 0)
    self._send()
    self._receive()
    if self._check_reply():
        try:
            # NOTE(review): the device name is assumed to start at byte 63
            # of the ListIdentity reply -- confirm against the spec.
            self._device_description = self._reply[63:-1]
            return True
        except Exception as e:
            raise CommError(e)
    return False
def send_rr_data(self, msg):
    """ SendRRData transfer an encapsulated request/reply packet between the originator and target
    :param msg: The message to be send to the target
    :return: the result of checking the reply received from the target
    """
    self._message = self.build_header(ENCAPSULATION_COMMAND["send_rr_data"], len(msg)) + msg
    self._send()
    self._receive()
    return self._check_reply()
def send_unit_data(self, msg):
    """ SendUnitData send encapsulated connected messages.
    :param msg: The message to be send to the target
    :return: the result of checking the reply received from the target
    """
    self._message = self.build_header(ENCAPSULATION_COMMAND["send_unit_data"], len(msg)) + msg
    self._send()
    self._receive()
    return self._check_reply()
def get_status(self):
    """ Get the last status/error
    This method can be used after any call to get any details in case of error
    :return: A tuple containing (error group, error message)

    NOTE(review): most code paths store a (code, message) tuple, but a few
    store a bare string -- callers should not rely on the tuple shape
    unconditionally.
    """
    return self._status
def clear(self):
    """ Reset the last status/error to the empty (0, "") tuple. """
    self._status = (0, "")
def build_header(self, command, length):
    """ Build the 24-byte fixed-length encapsulation message header.

    :param command: the 2-byte encapsulation command (already packed)
    :param length: length of the optional data portion that follows
    :return: the header bytes
    :raise CommError: if any field cannot be packed/concatenated
    """
    try:
        parts = (
            command,                             # Command UINT
            pack_uint(length),                   # Length UINT
            pack_dint(self._session),            # Session Handle UDINT
            pack_dint(0),                        # Status UDINT
            self.attribs['context'],             # Sender Context 8 bytes
            pack_dint(self.attribs['option']),   # Option UDINT
        )
        return ''.join(parts)
    except Exception as e:
        raise CommError(e)
def register_session(self):
    """ Register a new session with the communication partner

    :return: None if any error, otherwise return the session number
    """
    if self._session:
        return self._session

    self._session = 0
    self._message = self.build_header(ENCAPSULATION_COMMAND['register_session'], 4)
    self._message += pack_uint(self.attribs['protocol version'])
    self._message += pack_uint(0)
    self._send()
    self._receive()
    if self._check_reply():
        # Session handle lives at bytes 4..8 of the encapsulation header.
        self._session = unpack_dint(self._reply[4:8])
        self.logger.debug("Session ={0} has been registered.".format(print_bytes_line(self._reply[4:8])))
        return self._session

    # Keep the status in the (code, message) tuple form used everywhere else
    # (clear(), forward_open(), ...); it was previously set to a bare string,
    # which broke callers unpacking get_status() as a pair.
    self._status = (1, 'Warning ! the session has not been registered.')
    self.logger.warning(self._status)
    return None
def forward_open(self):
    """ CIP implementation of the forward open message

    Opens a CIP connection to the target. Refer to ODVA documentation
    Volume 1 3-5.5.2.

    :return: True on success, False if any error in the reply
    :raise CommError: if no session has been registered yet
    """
    if self._session == 0:
        self._status = (4, "A session need to be registered before to call forward_open.")
        raise CommError("A session need to be registered before to call forward open")
    # Forward Open request body; field order follows the CIP spec.
    forward_open_msg = [
        FORWARD_OPEN,
        pack_usint(2),
        CLASS_ID["8-bit"],
        CLASS_CODE["Connection Manager"],  # Volume 1: 5-1
        INSTANCE_ID["8-bit"],
        CONNECTION_MANAGER_INSTANCE['Open Request'],
        PRIORITY,
        TIMEOUT_TICKS,
        pack_dint(0),
        self.attribs['cid'],
        self.attribs['csn'],
        self.attribs['vid'],
        self.attribs['vsn'],
        TIMEOUT_MULTIPLIER,
        '\x00\x00\x00',  # reserved bytes
        pack_dint(self.attribs['rpi'] * 1000),   # O->T requested packet interval (us)
        pack_uint(CONNECTION_PARAMETER['Default']),
        pack_dint(self.attribs['rpi'] * 1000),   # T->O requested packet interval (us)
        pack_uint(CONNECTION_PARAMETER['Default']),
        TRANSPORT_CLASS,  # Transport Class
        # CONNECTION_SIZE['Backplane'],
        CONNECTION_SIZE['Direct Network'],
        # pack_usint(self.attribs['backplane']),
        # pack_usint(self.attribs['cpu slot']),
        CLASS_ID["8-bit"],
        CLASS_CODE["Message Router"],
        INSTANCE_ID["8-bit"],
        pack_usint(1)
    ]
    if self.send_rr_data(
            build_common_packet_format(DATA_ITEM['Unconnected'], ''.join(forward_open_msg), ADDRESS_ITEM['UCMM'],)):
        # NOTE(review): bytes 44..48 presumably hold the O->T connection ID
        # in the Forward Open reply; confirm against the reply layout.
        self._target_cid = self._reply[44:48]
        self._target_is_connected = True
        return True
    self._status = (4, "forward_open returned False")
    return False
def forward_close(self):
    """ CIP implementation of the forward close message

    Each connection opened with the forward open message needs to be closed.
    Refer to ODVA documentation Volume 1 3-5.5.3.

    :return: True on success, False if any error in the reply
    :raise CommError: if no session has been registered yet
    """
    if self._session == 0:
        self._status = (5, "A session need to be registered before to call forward_close.")
        raise CommError("A session need to be registered before to call forward_close.")
    # print ("Backplane:{0}\nCPU:{1}".format(self.attribs['backplane'], self.attribs['cpu slot']))
    # Forward Close request body; mirrors the Forward Open triad
    # (connection serial / vendor id / vendor serial) so the target can
    # match the connection being torn down.
    forward_close_msg = [
        FORWARD_CLOSE,
        pack_usint(2),
        CLASS_ID["8-bit"],
        CLASS_CODE["Connection Manager"],  # Volume 1: 5-1
        INSTANCE_ID["8-bit"],
        CONNECTION_MANAGER_INSTANCE['Open Request'],
        PRIORITY,
        TIMEOUT_TICKS,
        self.attribs['csn'],
        self.attribs['vid'],
        self.attribs['vsn'],
        CONNECTION_SIZE['Direct Network'],
        # CONNECTION_SIZE['Backplane'],
        '\x00',  # Reserved
        # pack_usint(self.attribs['backplane']),
        # pack_usint(self.attribs['cpu slot']),
        CLASS_ID["8-bit"],
        CLASS_CODE["Message Router"],
        INSTANCE_ID["8-bit"],
        pack_usint(1)
    ]
    if self.send_rr_data(
            build_common_packet_format(DATA_ITEM['Unconnected'], ''.join(forward_close_msg), ADDRESS_ITEM['UCMM'])):
        self._target_is_connected = False
        return True
    self._status = (5, "forward_close returned False")
    self.logger.warning(self._status)
    return False
def un_register_session(self):
    """ Un-register a connection

    Sends the UnRegisterSession encapsulation command; no reply is expected.
    """
    self._message = self.build_header(ENCAPSULATION_COMMAND['unregister_session'], 0)
    self._send()
    # Use 0 (not None) so the sentinel matches clean_up() and the
    # `self._session == 0` guards in forward_open()/forward_close().
    self._session = 0
def _send(self):
    """
    socket send

    Writes self._message to the socket, logging the raw bytes at debug level.

    :raise CommError: wrapping any socket-level error
    """
    try:
        self.logger.debug(print_bytes_msg(self._message, '-------------- SEND --------------'))
        self.__sock.send(self._message)
    except Exception as e:
        # self.clean_up()
        raise CommError(e)
def _receive(self):
    """
    socket receive

    Stores the received bytes in self._reply, logging them at debug level.

    :raise CommError: wrapping any socket-level error
    """
    try:
        self._reply = self.__sock.receive()
        self.logger.debug(print_bytes_msg(self._reply, '----------- RECEIVE -----------'))
    except Exception as e:
        # self.clean_up()
        raise CommError(e)
def open(self, ip_address):
    """ Open the TCP connection and register an Ethernet/IP session.

    :param ip_address: target IP address; stored in attribs on success
    :return: True if the connection is open (or already was), False if the
             session could not be registered
    :raise CommError: wrapping any socket-level failure
    """
    # handle the socket layer
    if not self._connection_opened:
        try:
            if self.__sock is None:
                self.__sock = Socket()
            self.__sock.connect(ip_address, self.attribs['port'])
            self._connection_opened = True
            self.attribs['ip address'] = ip_address
            if self.register_session() is None:
                self._status = (13, "Session not registered")
                return False
            # NOTE(review): a forward_close is issued here, presumably to
            # tear down any stale connection left by a previous run; confirm.
            self.forward_close()
            return True
        except Exception as e:
            # self.clean_up()
            raise CommError(e)
    # Previously this path fell through and returned None (falsy) even
    # though the connection is usable; report success explicitly.
    return True
def close(self):
    """
    socket close

    Tears down, in order: the CIP connection (forward_close), the registered
    session, then the TCP socket; finally resets all connection state.

    :raise CommError: wrapping any error raised during teardown
    """
    try:
        if self._target_is_connected:
            self.forward_close()
        if self._session != 0:
            self.un_register_session()
        if self.__sock:
            self.__sock.close()
    except Exception as e:
        raise CommError(e)
    self.clean_up()
def clean_up(self):
    """ Reset all connection state after the socket has been released. """
    self._session = 0
    self._connection_opened = False
    self._target_is_connected = False
    self.__sock = None
def is_connected(self):
    """ Report whether the TCP connection is currently open.

    :return: the current value of the connection-opened flag
    """
    return self._connection_opened

Binary file not shown.

View File

@@ -1,482 +0,0 @@
# -*- coding: utf-8 -*-
#
# cip_const.py - A set of structures and constants used to implement the Ethernet/IP protocol
#
#
# Copyright (c) 2014 Agostino Ruscito <ruscito@gmail.com>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
ELEMENT_ID = {
"8-bit": '\x28',
"16-bit": '\x29',
"32-bit": '\x2a'
}
CLASS_ID = {
"8-bit": '\x20',
"16-bit": '\x21',
}
INSTANCE_ID = {
"8-bit": '\x24',
"16-bit": '\x25'
}
ATTRIBUTE_ID = {
"8-bit": '\x30',
"16-bit": '\x31'
}
# Path are combined as:
# CLASS_ID + PATHS
# For example PCCC path is CLASS_ID["8-bit"]+PATH["PCCC"] -> 0x20, 0x67, 0x24, 0x01.
PATH = {
'Connection Manager': '\x06\x24\x01',
'Router': '\x02\x24\x01',
'Backplane Data Type': '\x66\x24\x01',
'PCCC': '\x67\x24\x01',
'DHCP Channel A': '\xa6\x24\x01\x01\x2c\x01',
'DHCP Channel B': '\xa6\x24\x01\x02\x2c\x01'
}
ENCAPSULATION_COMMAND = { # Volume 2: 2-3.2 Command Field UINT 2 byte
"nop": '\x00\x00',
"list_targets": '\x01\x00',
"list_services": '\x04\x00',
"list_identity": '\x63\x00',
"list_interfaces": '\x64\x00',
"register_session": '\x65\x00',
"unregister_session": '\x66\x00',
"send_rr_data": '\x6F\x00',
"send_unit_data": '\x70\x00'
}
"""
When a tag is created, an instance of the Symbol Object (Class ID 0x6B) is created
inside the controller.
When a UDT is created, an instance of the Template object (Class ID 0x6C) is
created to hold information about the structure makeup.
"""
CLASS_CODE = {
"Message Router": '\x02', # Volume 1: 5-1
"Symbol Object": '\x6b',
"Template Object": '\x6c',
"Connection Manager": '\x06' # Volume 1: 3-5
}
CONNECTION_MANAGER_INSTANCE = {
'Open Request': '\x01',
'Open Format Rejected': '\x02',
'Open Resource Rejected': '\x03',
'Open Other Rejected': '\x04',
'Close Request': '\x05',
'Close Format Request': '\x06',
'Close Other Request': '\x07',
'Connection Timeout': '\x08'
}
TAG_SERVICES_REQUEST = {
"Read Tag": 0x4c,
"Read Tag Fragmented": 0x52,
"Write Tag": 0x4d,
"Write Tag Fragmented": 0x53,
"Read Modify Write Tag": 0x4e,
"Multiple Service Packet": 0x0a,
"Get Instance Attributes List": 0x55,
"Get Attributes": 0x03,
"Read Template": 0x4c,
}
TAG_SERVICES_REPLY = {
0xcc: "Read Tag",
0xd2: "Read Tag Fragmented",
0xcd: "Write Tag",
0xd3: "Write Tag Fragmented",
0xce: "Read Modify Write Tag",
0x8a: "Multiple Service Packet",
0xd5: "Get Instance Attributes List",
0x83: "Get Attributes",
0xcc: "Read Template"
}
I_TAG_SERVICES_REPLY = {
"Read Tag": 0xcc,
"Read Tag Fragmented": 0xd2,
"Write Tag": 0xcd,
"Write Tag Fragmented": 0xd3,
"Read Modify Write Tag": 0xce,
"Multiple Service Packet": 0x8a,
"Get Instance Attributes List": 0xd5,
"Get Attributes": 0x83,
"Read Template": 0xcc
}
"""
EtherNet/IP Encapsulation Error Codes
Standard CIP Encapsulation Error returned in the cip message header
"""
STATUS = {
0x0000: "Success",
0x0001: "The sender issued an invalid or unsupported encapsulation command",
0x0002: "Insufficient memory",
0x0003: "Poorly formed or incorrect data in the data portion",
0x0064: "An originator used an invalid session handle when sending an encapsulation message to the target",
0x0065: "The target received a message of invalid length",
0x0069: "Unsupported Protocol Version"
}
"""
MSG Error Codes:
The following error codes have been taken from:
Rockwell Automation Publication
1756-RM003P-EN-P - December 2014
"""
SERVICE_STATUS = {
0x01: "Connection failure (see extended status)",
0x02: "Insufficient resource",
0x03: "Invalid value",
0x04: "IOI syntax error. A syntax error was detected decoding the Request Path (see extended status)",
0x05: "Destination unknown, class unsupported, instance \nundefined or structure element undefined (see extended status)",
0x06: "Insufficient Packet Space",
0x07: "Connection lost",
0x08: "Service not supported",
0x09: "Error in data segment or invalid attribute value",
0x0A: "Attribute list error",
0x0B: "State already exist",
0x0C: "Object state conflict",
0x0D: "Object already exist",
0x0E: "Attribute not settable",
0x0F: "Permission denied",
0x10: "Device state conflict",
0x11: "Reply data too large",
0x12: "Fragmentation of a primitive value",
0x13: "Insufficient command data",
0x14: "Attribute not supported",
0x15: "Too much data",
0x1A: "Bridge request too large",
0x1B: "Bridge response too large",
0x1C: "Attribute list shortage",
0x1D: "Invalid attribute list",
0x1E: "Request service error",
0x1F: "Connection related failure (see extended status)",
0x22: "Invalid reply received",
0x25: "Key segment error",
0x26: "Invalid IOI error",
0x27: "Unexpected attribute in list",
0x28: "DeviceNet error - invalid member ID",
0x29: "DeviceNet error - member not settable",
0xD1: "Module not in run state",
0xFB: "Message port not supported",
0xFC: "Message unsupported data type",
0xFD: "Message uninitialized",
0xFE: "Message timeout",
0xff: "General Error (see extended status)"
}
EXTEND_CODES = {
0x01: {
0x0100: "Connection in use",
0x0103: "Transport not supported",
0x0106: "Ownership conflict",
0x0107: "Connection not found",
0x0108: "Invalid connection type",
0x0109: "Invalid connection size",
0x0110: "Module not configured",
0x0111: "EPR not supported",
0x0114: "Wrong module",
0x0115: "Wrong device type",
0x0116: "Wrong revision",
0x0118: "Invalid configuration format",
0x011A: "Application out of connections",
0x0203: "Connection timeout",
0x0204: "Unconnected message timeout",
0x0205: "Unconnected send parameter error",
0x0206: "Message too large",
0x0301: "No buffer memory",
0x0302: "Bandwidth not available",
0x0303: "No screeners available",
0x0305: "Signature match",
0x0311: "Port not available",
0x0312: "Link address not available",
0x0315: "Invalid segment type",
0x0317: "Connection not scheduled"
},
0x04: {
0x0000: "Extended status out of memory",
0x0001: "Extended status out of instances"
},
0x05: {
0x0000: "Extended status out of memory",
0x0001: "Extended status out of instances"
},
0x1F: {
0x0203: "Connection timeout"
},
0xff: {
0x7: "Wrong data type",
0x2001: "Excessive IOI",
0x2002: "Bad parameter value",
0x2018: "Semaphore reject",
0x201B: "Size too small",
0x201C: "Invalid size",
0x2100: "Privilege failure",
0x2101: "Invalid keyswitch position",
0x2102: "Password invalid",
0x2103: "No password issued",
0x2104: "Address out of range",
0x2105: "Address and how many out of range",
0x2106: "Data in use",
0x2107: "Type is invalid or not supported",
0x2108: "Controller in upload or download mode",
0x2109: "Attempt to change number of array dimensions",
0x210A: "Invalid symbol name",
0x210B: "Symbol does not exist",
0x210E: "Search failed",
0x210F: "Task cannot start",
0x2110: "Unable to write",
0x2111: "Unable to read",
0x2112: "Shared routine not editable",
0x2113: "Controller in faulted mode",
0x2114: "Run mode inhibited"
}
}
DATA_ITEM = {
'Connected': '\xb1\x00',
'Unconnected': '\xb2\x00'
}
ADDRESS_ITEM = {
'Connection Based': '\xa1\x00',
'Null': '\x00\x00',
'UCMM': '\x00\x00'
}
UCMM = {
'Interface Handle': 0,
'Item Count': 2,
'Address Type ID': 0,
'Address Length': 0,
'Data Type ID': 0x00b2
}
CONNECTION_SIZE = {
'Backplane': '\x03', # CLX
'Direct Network': '\x02'
}
HEADER_SIZE = 24
EXTENDED_SYMBOL = '\x91'
BOOL_ONE = 0xff
REQUEST_SERVICE = 0
REQUEST_PATH_SIZE = 1
REQUEST_PATH = 2
SUCCESS = 0
INSUFFICIENT_PACKETS = 6
OFFSET_MESSAGE_REQUEST = 40
FORWARD_CLOSE = '\x4e'
UNCONNECTED_SEND = '\x52'
FORWARD_OPEN = '\x54'
LARGE_FORWARD_OPEN = '\x5b'
GET_CONNECTION_DATA = '\x56'
SEARCH_CONNECTION_DATA = '\x57'
GET_CONNECTION_OWNER = '\x5a'
MR_SERVICE_SIZE = 2
PADDING_BYTE = '\x00'
PRIORITY = '\x0a'
TIMEOUT_TICKS = '\x05'
TIMEOUT_MULTIPLIER = '\x01'
TRANSPORT_CLASS = '\xa3'
CONNECTION_PARAMETER = {
'PLC5': 0x4302,
'SLC500': 0x4302,
'CNET': 0x4320,
'DHP': 0x4302,
'Default': 0x43f8,
}
"""
Atomic Data Type:
Bit = Bool
Bit array = DWORD (32-bit boolean aray)
8-bit integer = SINT
16-bit integer = UINT
32-bit integer = DINT
32-bit float = REAL
64-bit integer = LINT
From Rockwell Automation Publication 1756-PM020C-EN-P November 2012:
When reading a BOOL tag, the values returned for 0 and 1 are 0 and 0xff, respectively.
"""
S_DATA_TYPE = {
'BOOL': 0xc1,
'SINT': 0xc2, # Signed 8-bit integer
'INT': 0xc3, # Signed 16-bit integer
'DINT': 0xc4, # Signed 32-bit integer
'LINT': 0xc5, # Signed 64-bit integer
'USINT': 0xc6, # Unsigned 8-bit integer
'UINT': 0xc7, # Unsigned 16-bit integer
'UDINT': 0xc8, # Unsigned 32-bit integer
'ULINT': 0xc9, # Unsigned 64-bit integer
'REAL': 0xca, # 32-bit floating point
'LREAL': 0xcb, # 64-bit floating point
'STIME': 0xcc, # Synchronous time
'DATE': 0xcd,
'TIME_OF_DAY': 0xce,
'DATE_AND_TIME': 0xcf,
'STRING': 0xd0, # character string (1 byte per character)
'BYTE': 0xd1, # byte string 8-bits
'WORD': 0xd2, # byte string 16-bits
'DWORD': 0xd3, # byte string 32-bits
'LWORD': 0xd4, # byte string 64-bits
'STRING2': 0xd5, # character string (2 byte per character)
'FTIME': 0xd6, # Duration high resolution
'LTIME': 0xd7, # Duration long
'ITIME': 0xd8, # Duration short
'STRINGN': 0xd9, # character string (n byte per character)
'SHORT_STRING': 0xda, # character string (1 byte per character, 1 byte length indicator)
'TIME': 0xdb, # Duration in milliseconds
'EPATH': 0xdc, # CIP Path segment
'ENGUNIT': 0xdd, # Engineering Units
'STRINGI': 0xde # International character string
}
I_DATA_TYPE = {
0xc1: 'BOOL',
0xc2: 'SINT', # Signed 8-bit integer
0xc3: 'INT', # Signed 16-bit integer
0xc4: 'DINT', # Signed 32-bit integer
0xc5: 'LINT', # Signed 64-bit integer
0xc6: 'USINT', # Unsigned 8-bit integer
0xc7: 'UINT', # Unsigned 16-bit integer
0xc8: 'UDINT', # Unsigned 32-bit integer
0xc9: 'ULINT', # Unsigned 64-bit integer
0xca: 'REAL', # 32-bit floating point
0xcb: 'LREAL', # 64-bit floating point
0xcc: 'STIME', # Synchronous time
0xcd: 'DATE',
0xce: 'TIME_OF_DAY',
0xcf: 'DATE_AND_TIME',
0xd0: 'STRING', # character string (1 byte per character)
0xd1: 'BYTE', # byte string 8-bits
0xd2: 'WORD', # byte string 16-bits
0xd3: 'DWORD', # byte string 32-bits
0xd4: 'LWORD', # byte string 64-bits
0xd5: 'STRING2', # character string (2 byte per character)
0xd6: 'FTIME', # Duration high resolution
0xd7: 'LTIME', # Duration long
0xd8: 'ITIME', # Duration short
0xd9: 'STRINGN', # character string (n byte per character)
0xda: 'SHORT_STRING', # character string (1 byte per character, 1 byte length indicator)
0xdb: 'TIME', # Duration in milliseconds
0xdc: 'EPATH', # CIP Path segment
0xdd: 'ENGUNIT', # Engineering Units
0xde: 'STRINGI' # International character string
}
REPLAY_INFO = {
0x4e: 'FORWARD_CLOSE (4E,00)',
0x52: 'UNCONNECTED_SEND (52,00)',
0x54: 'FORWARD_OPEN (54,00)',
0x6f: 'send_rr_data (6F,00)',
0x70: 'send_unit_data (70,00)',
0x00: 'nop',
0x01: 'list_targets',
0x04: 'list_services',
0x63: 'list_identity',
0x64: 'list_interfaces',
0x65: 'register_session',
0x66: 'unregister_session',
}
PCCC_DATA_TYPE = {
'N': '\x89',
'B': '\x85',
'T': '\x86',
'C': '\x87',
'S': '\x84',
'F': '\x8a',
'ST': '\x8d',
'A': '\x8e',
'R': '\x88',
'O': '\x8b',
'I': '\x8c'
}
PCCC_DATA_SIZE = {
'N': 2,
'B': 2,
'T': 6,
'C': 6,
'S': 2,
'F': 4,
'ST': 84,
'A': 2,
'R': 6,
'O': 2,
'I': 2
}
PCCC_CT = {
'PRE': 1,
'ACC': 2,
'EN': 15,
'TT': 14,
'DN': 13,
'CU': 15,
'CD': 14,
'OV': 12,
'UN': 11,
'UA': 10
}
PCCC_ERROR_CODE = {
-2: "Not Acknowledged (NAK)",
-3: "No Reponse, Check COM Settings",
-4: "Unknown Message from DataLink Layer",
-5: "Invalid Address",
-6: "Could Not Open Com Port",
-7: "No data specified to data link layer",
-8: "No data returned from PLC",
-20: "No Data Returned",
16: "Illegal Command or Format, Address may not exist or not enough elements in data file",
32: "PLC Has a Problem and Will Not Communicate",
48: "Remote Node Host is Missing, Disconnected, or Shut Down",
64: "Host Could Not Complete Function Due To Hardware Fault",
80: "Addressing problem or Memory Protect Rungs",
96: "Function not allows due to command protection selection",
112: "Processor is in Program mode",
128: "Compatibility mode file missing or communication zone problem",
144: "Remote node cannot buffer command",
240: "Error code in EXT STS Byte"
}

View File

@@ -1,32 +0,0 @@
__author__ = 'Agostino Ruscito'
__version__ = "1.0.7"
__date__ = "08 03 2015"

import logging

# Package-wide logging configuration: everything is written (truncated on
# each run) to pycomm.log at INFO level.
logging.basicConfig(
    filename="pycomm.log",
    filemode='w',
    level=logging.INFO,
    format="%(name)-13s %(levelname)-10s %(asctime)s %(message)s",
    # propagate=0,
)

LOGGER = logging.getLogger('pycomm')
class PycommError(Exception):
    """Base exception for all pycomm-specific errors."""
    pass
def setup_logger(name, level, filename=None):
    """Create and configure a child logger under the 'pycomm' namespace.

    :param name: suffix appended to 'pycomm.' to form the logger name
    :param level: logging level applied to the logger
    :param filename: optional log file; when given, a dedicated FileHandler
                     is attached and propagation to the root logger stops
    :return: the configured Logger instance
    """
    log = logging.getLogger('pycomm.' + name)
    log.setLevel(level)
    if filename:
        handler = logging.FileHandler(filename, mode='w')
        handler.setFormatter(logging.Formatter("%(levelname)-10s %(asctime)s %(message)s"))
        log.addHandler(handler)
        log.propagate = False
    return log

Binary file not shown.

View File

@@ -1,70 +0,0 @@
#!/usr/bin/env python
'''
Created on Dec 8, 2015
@author: Patrick McDonagh
'''
from datetime import datetime
import sys
from random import randint
import time
import MySQLdb
import tuxeip
#TUXEIP Connection to PLC
from tuxeip import TuxEIP, LGX, LGX_REAL
def main():
db = MySQLdb.connect(host="127.0.0.1",user="website",passwd="henrypump",db="TagData")
cur = db.cursor()
query = "SELECT * FROM TagData.tags WHERE deleted = 0;"
cur.execute(query)
tags = cur.fetchall()
# ((1L, 'DC_Bus_Voltage', datetime.datetime(2015, 12, 8, 16, 2, 32), 'V', 0L), (2L, 'Output_Frequency', datetime.datetime(2015, 12, 8, 16, 31, 12), 'Hz', 0L))
db.commit()
db.close()
PLC_IP_ADDRESS = "10.10.10.3" # MAKE THIS A db VALUE
scan_rate = 10
tagList = [];
if len(tags) > 0:
for t in tags:
tagList.append({"id":int(t[0]), "name":t[1], "val":None, "lastVal":None});
try:
tux = TuxEIP(libpath="/usr/lib/libtuxeip.so")
sess = tux.OpenSession(PLC_IP_ADDRESS)
reg = tux.RegisterSession(sess)
conn = tux.ConnectPLCOverCNET(sess, LGX, 1, 100, 123, randint(0,9999), 123, 321, 100, 5000, 1, '01')
while True:
for r in tagList:
r["val"] = tux.ReadLGXDataAsFloat(sess, conn, r['name'], 1)[0]
print("{0} - {1}".format(r["name"], r["val"]))
if not r["val"] == r["lastVal"]:
db = MySQLdb.connect(host="127.0.0.1",user="website",passwd="henrypump",db="TagData")
cur = db.cursor()
aQuery = """INSERT INTO TagData.values (tagID, val) VALUES ('%d', '%f');"""%(r["id"], float(r["val"]))
print(aQuery)
storeVal = cur.execute(aQuery)
db.commit()
db.close()
r["lastVal"] = r["val"]
time.sleep(10)
except Exception as err:
print err
pass
if __name__ == '__main__':
main()

View File

@@ -1,132 +0,0 @@
#!/usr/bin/env python
'''
Created on Dec 8, 2015
@author: Patrick McDonagh
'''
import time
import sqlite3 as lite
from pycomm.ab_comm.clx import Driver as ClxDriver
import micro800 as u800
import logging

# con = lite.connect("/usr/db/data.db")
# Module-level database handle; all tag/config reads and value inserts go
# through this single sqlite connection.
con = lite.connect('/mnt/usb/data.db')

configProperties = {}
def readTag(addr, tag):
logging.basicConfig(
filename="ClxDriver.log",
format="%(levelname)-10s %(asctime)s %(message)s",
level=logging.DEBUG
)
c = ClxDriver()
if c.open(addr):
try:
v = c.read_tag(tag)
# print(v)
return v
except Exception as e:
err = c.get_status()
c.close()
print err
print e
pass
c.close()
def main():
with con:
cur = con.cursor()
query = "SELECT * FROM tags WHERE deleted = 0;"
cur.execute(query)
tags = cur.fetchall()
configObj = {}
with con:
cur = con.cursor()
query = "SELECT parameter, val FROM config GROUP BY parameter;"
cur.execute(query)
config = cur.fetchall()
for x in config:
configObj[x[0]] = x[1]
try:
configProperties['PLC_IP_ADDRESS'] = str(configObj['ip_address'])
print("FYI, using PLC IP Address from the database {0}".format(configProperties['PLC_IP_ADDRESS']))
except KeyError:
print("FYI, there is no PLC IP Address stored in the database, defaulting to 192.168.1.10")
configProperties['PLC_IP_ADDRESS'] = "192.168.1.10"
try:
configProperties['plc_type'] = str(configObj['plc_type'])
print("FYI, using PLC Type from the database {0}".format(configProperties['plc_type']))
except KeyError:
print("FYI, there is no PLC Type stored in the database, defaulting to CLX")
configProperties['plc_type'] = "CLX"
try:
configProperties['scan_rate'] = int(configObj['scan_rate'])
print("FYI, using Scan Rate from the database {0}".format(configProperties['scan_rate']))
except KeyError:
print("FYI, there is no Scan Rate stored in the database, defaulting to 10 seconds")
configProperties['scan_rate'] = 10
try:
sa_test = str(configObj['save_all'])
if sa_test == "true":
configProperties['save_all'] = True
else:
configProperties['save_all'] = False
print("FYI, value for save_all is {0}".format(configProperties['save_all']))
except KeyError:
print("FYI, there is no save_all value stored in the database, using False")
configProperties['save_all'] = False
tagList = []
print("\nScan List\n--------------")
if len(tags) > 0:
for t in tags:
tagList.append({"id": int(t[0]), "name": t[1], "val": None, "lastVal": None})
print(t[1])
print("--------------\n")
while True:
try:
for r in tagList:
r['val'] = 0
if configProperties['plc_type'] == "u800":
r["val"] = u800.readMicroTag(configProperties['PLC_IP_ADDRESS'], str(r['name']))[0]
else:
r["val"] = readTag(configProperties['PLC_IP_ADDRESS'], str(r['name']))[0]
print("{0} - {1}".format(r["name"], r["val"]))
if (not configProperties['save_all'] and not r["val"] == r["lastVal"]) or configProperties['save_all']:
with con:
cur = con.cursor()
aQuery = """INSERT INTO vals (tagID, val) VALUES ('%d', '%f');""" % (r["id"], float(r["val"]))
# print(aQuery)
cur.execute(aQuery)
con.commit()
print("<saved>")
r["lastVal"] = r["val"]
print("-----------")
time.sleep(configProperties['scan_rate'])
except Exception as err:
print err
main()
if __name__ == '__main__':
main()

View File

@@ -1,276 +0,0 @@
#! /usr/bin/env python
# Copyright (C) 2014 Gayner Technical Services Pty Ltd
from ctypes import *
# PLC TYPES (values passed straight through to libtuxeip)
Unknow=0
PLC=1
SLC500=2
LGX=3

# EIP DATA TYPES (PCCC file types)
PLC_BIT=1
PLC_BIT_STRING=2
PLC_BYTE_STRING=3
PLC_INTEGER=4
PLC_TIMER=5
PLC_COUNTER=6
PLC_CONTROL=7
PLC_FLOATING=8
PLC_ARRAY=9
PLC_ADRESS=15
PLC_BCD=16

# LOGIX DATA TYPES (CIP atomic type codes)
LGX_BOOL=0xC1
LGX_BITARRAY=0xD3
LGX_SINT=0xC2
LGX_INT=0xC3
LGX_DINT=0xC4
LGX_REAL=0xCA
class Eip_Session(Structure):
    # ctypes mirror of the libtuxeip Eip_Session struct; field order and
    # types must match the C definition exactly — do not reorder.
    _fields_ = [
        ('sock',c_int),
        ('Session_Handle', c_uint),
        ('Sender_ContextL',c_int),
        ('Sender_ContextH',c_int),
        ('timeout', c_int),
        ('references', c_int),
        ('Data', c_void_p),
    ]
class Eip_Connection(Structure):
    # ctypes mirror of the libtuxeip Eip_Connection struct; field order and
    # types must match the C definition exactly — do not reorder.
    _fields_ = [
        ('Eip_Session', Eip_Session),
        ('references', c_int),
        ('Data', c_void_p),
        ('ConnectionSerialNumber', c_uint),
        ('OriginatorVendorID', c_uint),
        ('OriginatorSerialNumber', c_int),
        ('OT_ConnID', c_int),
        ('TO_ConnID', c_int),
        ('packet', c_short),
        ('Path_size', c_byte)
    ]
class Eip_PLC_Read(Structure):
    # ctypes mirror of the libtuxeip Eip_PLC_Read struct (read-result
    # header); field order and types must match the C definition exactly.
    _fields_ = [
        ('type', c_int),
        ('Varcount', c_int),
        ('totalise', c_int),
        ('elementsize', c_int),
        ('mask', c_uint),
    ]
class TuxEIPException(Exception):
    """Raised for any error reported by the underlying libtuxeip calls."""

    def __init__(self, value):
        # Keep the original message/object so callers can inspect it.
        self.value = value

    def __str__(self):
        return repr(self.value)
class TuxEIP:
    """Thin ctypes wrapper around the libtuxeip shared library.

    All methods delegate to the C library; pointer results are checked for
    NULL and converted into TuxEIPException.
    """

    def __init__(self, **kwargs):
        # libpath defaults to the macOS dylib name; pass
        # libpath="/usr/lib/libtuxeip.so" on Linux.
        self.__libpath = kwargs.get("libpath", "libtuxeip.dylib")
        self.__tuxeip = CDLL(self.__libpath)
        self.__tuxeip._cip_err_msg.restype = c_char_p

    def __del__(self):
        del self.__tuxeip

    def OpenSession(self, slaveip_, slaveport_=44818, slavetimeout_=1000):
        """Open a TCP session to the target; returns an Eip_Session pointer."""
        self.__tuxeip._OpenSession.restype = POINTER(Eip_Session)
        # Convert params to C types
        slaveip = c_char_p(slaveip_)
        slaveport = c_int(slaveport_)
        slavetimeout = c_int(slavetimeout_)
        session = self.__tuxeip._OpenSession(slaveip, slaveport, slavetimeout)
        # print self.__tuxeip._cip_err_msg, self.__tuxeip._cip_errno, self.__tuxeip._cip_ext_errno
        if bool(session) == False:
            raise TuxEIPException("Could not open session to " + str(slaveip) + ":" + str(slaveport))
        return session

    def RegisterSession(self, sess_):
        """Register the session with the target.

        NOTE(review): this raises when _RegisterSession returns a truthy
        value, which is correct only if the C call returns 0 on success —
        confirm against the libtuxeip source before changing.
        """
        self.__tuxeip._RegisterSession.restype = c_int
        reg = self.__tuxeip._RegisterSession(sess_)
        if reg != False:
            raise TuxEIPException("Could not register session")
        return reg

    def ConnectPLCOverCNET(self, sess_, plctype_, priority_, timeoutticks_, connid_, conserial_,
                           vendorid_, serialnum_, timeoutmult_, rpi_, transport_, slavepath_):
        """Open a CIP connection to the CPU; returns an Eip_Connection pointer."""
        # Convert params to C types
        priority = c_byte(priority_)
        timeoutticks = c_byte(timeoutticks_)
        connid = c_uint(connid_)
        conserial = c_ushort(conserial_)
        vendorid = c_ushort(vendorid_)
        serialnum = c_uint(serialnum_)
        timeutmult = c_byte(timeoutmult_)
        rpi = c_uint(rpi_)
        transport = c_byte(transport_)
        slavepath = c_char_p(slavepath_)
        pathlength = len(slavepath_)
        self.__tuxeip._ConnectPLCOverCNET.restype = POINTER(Eip_Connection)
        connection = self.__tuxeip._ConnectPLCOverCNET(
            sess_,
            plctype_,
            priority,
            timeoutticks,
            connid,
            conserial,
            vendorid,
            serialnum,
            timeutmult,
            rpi,
            transport,
            slavepath,
            pathlength
        )
        if bool(connection) == False:
            raise TuxEIPException("Could not connect to CPU")
        return connection

    def ReadLgxData(self, sess_, conn_, var_, num_):
        """Raw Logix read; caller must free the result with FreePLCRead()."""
        self.__tuxeip._ReadLgxData.restype = POINTER(Eip_PLC_Read)
        readdata = self.__tuxeip._ReadLgxData(sess_, conn_, var_, num_)
        if bool(readdata) == False:
            raise TuxEIPException("Read data failed")
        return readdata

    def WriteLGXData(self, sess_, conn_, address_, datatype_, data_, num_ ):
        """Write a Logix tag; only integer/bool/real atomic types supported."""
        if datatype_ == LGX_INT or datatype_ == LGX_BOOL or datatype_ == LGX_DINT or datatype_ == LGX_SINT:
            data = c_int(data_)
        elif datatype_ == LGX_REAL:
            data = c_float(data_)
        else:
            raise TuxEIPException("Write data failed")
        data = self.__tuxeip._WriteLgxData(sess_, conn_, address_, datatype_, byref(data), num_)
        return data

    def ReadLGXDataAsFloat(self, sess_, conn_, var_, num_):
        """Read a Logix tag and return its values as a list of floats."""
        data = self.ReadLgxData(sess_, conn_, var_, num_)
        d = self.GetLGXValueAsFloat(data)
        self.FreePLCRead(data)
        return d

    def ReadLGXDataAsInteger(self, sess_, conn_, var_, num_):
        """Read a Logix tag and return its values as a list of ints."""
        data = self.ReadLgxData(sess_, conn_, var_, num_)
        d = self.GetLGXValueAsInteger(data)
        self.FreePLCRead(data)
        return d

    def ReadPLCDataAsFloat(self, sess_, conn_, dhp_, routepath_, routesize_, plctype_, tns_, address_, number_):
        """PCCC read returning floats (PLC-5/SLC500 family)."""
        data = self.ReadPLCData(sess_, conn_, dhp_, routepath_, routesize_, plctype_, tns_, address_, number_)
        d = self.PCCC_GetValueAsFloat(data)
        self.FreePLCRead(data)
        return d

    def ReadPLCDataAsInteger(self, sess_, conn_, dhp_, routepath_, routesize_, plctype_, tns_, address_, number_):
        """PCCC read returning ints (PLC-5/SLC500 family)."""
        data = self.ReadPLCData(sess_, conn_, dhp_, routepath_, routesize_, plctype_, tns_, address_, number_)
        d = self.PCCC_GetValueAsInteger(data)
        self.FreePLCRead(data)
        return d

    def ReadPLCData(self, sess_, conn_, dhp_, routepath_, routesize_, plctype_, tns_, address_, number_):
        """Raw PCCC read; caller must free the result with FreePLCRead()."""
        self.__tuxeip._ReadPLCData.restype = POINTER(Eip_PLC_Read)
        readdata = self.__tuxeip._ReadPLCData(sess_, conn_, dhp_, routepath_, routesize_, plctype_,
                                              tns_, address_, number_)
        if bool(readdata) == False:
            raise TuxEIPException("Read data failed")
        return readdata

    def GetLGXValueAsFloat(self, readdata_):
        """Extract all values from a Logix read result as floats (None on NULL)."""
        if bool(readdata_) == False:
            return None
        self.__tuxeip._GetLGXValueAsFloat.restype = c_float
        values = []
        for i in range(0, readdata_.contents.Varcount):
            v = self.__tuxeip._GetLGXValueAsFloat(readdata_, i)
            values.append(v)
        return values

    def GetLGXValueAsInteger(self, readdata_):
        """Extract all values from a Logix read result as ints (None on NULL)."""
        if bool(readdata_) == False:
            return None
        self.__tuxeip._GetLGXValueAsInteger.restype = c_int
        values = []
        for i in range(0, readdata_.contents.Varcount):
            v = self.__tuxeip._GetLGXValueAsInteger(readdata_, i)
            values.append(v)
        return values

    def PCCC_GetValueAsFloat(self, readdata_):
        """Extract all values from a PCCC read result as floats (None on NULL)."""
        if bool(readdata_) == False:
            return None
        self.__tuxeip._PCCC_GetValueAsFloat.restype = c_float
        values = []
        for i in range(0, readdata_.contents.Varcount):
            v = self.__tuxeip._PCCC_GetValueAsFloat(readdata_, i)
            values.append(v)
        return values

    def PCCC_GetValueAsInteger(self, readdata_):
        """Extract all values from a PCCC read result as ints (None on NULL)."""
        if bool(readdata_) == False:
            return None
        self.__tuxeip._PCCC_GetValueAsInteger.restype = c_int
        values = []
        for i in range(0, readdata_.contents.Varcount):
            v = self.__tuxeip._PCCC_GetValueAsInteger(readdata_, i)
            values.append(v)
        return values

    def WritePLCData(self, sess_, conn_, dhp_, routepath_, routesize_, plctype_, tns_, address_, datatype_, data_, number_):
        """PCCC write; only integer and floating file types supported."""
        if datatype_ == PLC_INTEGER:
            data = c_int(data_)
        elif datatype_ == PLC_FLOATING:
            data = c_float(data_)
        else:
            raise TuxEIPException("Variable type not supported" + str(datatype_))
        result = self.__tuxeip._WritePLCData(sess_, conn_, dhp_, routepath_, routesize_, plctype_,
                                             tns_, address_, datatype_, byref(data), number_)
        return result

    def Forward_Close(self, conn_):
        """Close the CIP connection opened by ConnectPLCOverCNET()."""
        self.__tuxeip._Forward_Close(conn_)

    def UnRegisterSession(self, sess_):
        """Un-register the encapsulation session."""
        self.__tuxeip._UnRegisterSession(sess_)

    def CloseSession(self, sess_):
        # NOTE(review): this calls CloseSession WITHOUT the leading
        # underscore every other wrapped symbol uses — confirm the exported
        # symbol name in libtuxeip.
        self.__tuxeip.CloseSession(sess_)

    def FreePLCRead(self, data_):
        """Free a read result returned by ReadLgxData()/ReadPLCData()."""
        self.__tuxeip._FreePLCRead(data_)

Binary file not shown.

133
sampleData.py Normal file
View File

@@ -0,0 +1,133 @@
#!/usr/bin/env python
'''
MySQL Tag Server
Created on April 7, 2016
@author: Patrick McDonagh
@description: Continuously loops through a list of tags to store values from a PLC into a MySQL database
'''
import mysql.connector as mysqlcon
import pickle
from tag.tag_mysql import Tag
import traceback
import time
import os
import random
class Sample(Tag):
    """Tag subclass that generates random sample data instead of reading a PLC.

    NOTE(review): the original file's indentation was lost in extraction; the
    nesting below is a reconstruction — verify against the original source.
    """

    def read(self, forceSend):
        writeToDB = False
        if self.tag:
            v = 0.0
            if not (self.value is None):
                # Random walk: +/- 5.0 around the previous value.
                v = [self.value + (10.0 * (random.random() - 0.5))]
            else:
                # First read: seed with a random value in [0, 100).
                v = [random.random() * 100.0]
            if v:
                val = v[0]
                if self.data_type == 'BOOL' or self.data_type == 'STRING':
                    if self.mapFn:
                        val = self.mapFn[val]
                    # Discrete types: send on any change, on first send, after
                    # the guarantee interval, or when forced.
                    if (self.last_send_time == 0) or (self.value is None) or not (self.value == val) or ((time.time() - self.last_send_time) > self.guarantee_sec) or (forceSend is True):
                        self.last_value = self.value
                        self.value = val
                        writeToDB = True
                    else:
                        writeToDB = False
                else:
                    # Analog types: send only when the change exceeds the
                    # threshold (or first send / guarantee interval / forced).
                    if (self.last_send_time == 0) or (self.value is None) or (abs(self.value - v[0]) > self.chg_threshold) or ((time.time() - self.last_send_time) > self.guarantee_sec) or (forceSend is True):
                        self.last_value = self.value
                        self.value = v[0]
                        writeToDB = True
                    else:
                        writeToDB = False
                # NOTE(review): this override makes writeToDB True ONLY when
                # forceSend is True, defeating all the change-detection logic
                # above — confirm whether that is intentional for sample data.
                if forceSend is False:
                    writeToDB = False
            if writeToDB:
                self.sendToDB()
        return self.value
with open(os.path.realpath('.') + '/mysql_cfg.pickle', 'rb') as pickleconfig:
mysql_cfg = pickle.load(pickleconfig)
if mysql_cfg:
db = mysqlcon.connect(**mysql_cfg)
tag_store = {}
configProperties = {}
def main():
db.connect()
cur = db.cursor()
query = "SELECT * FROM tags WHERE class = 5 AND deleted = 0"
cur.execute(query)
tags = cur.fetchall()
print tags
# [(1, u'Century Counter Up', 5, u'Century_Counter_Up', u'REAL', 10.0, 3600, None, 0)]
db.disconnect()
configObj = {}
db.connect()
cur = db.cursor()
query = "SELECT parameter, val FROM config GROUP BY parameter;"
cur.execute(query)
config = cur.fetchall()
db.disconnect()
for x in config:
configObj[x[0]] = x[1]
try:
configProperties['PLC_IP_ADDRESS'] = str(configObj['ip_address'])
print("FYI, using PLC IP Address from the database {0}".format(configProperties['PLC_IP_ADDRESS']))
except KeyError:
print("FYI, there is no PLC IP Address stored in the database, defaulting to 192.168.1.10")
configProperties['PLC_IP_ADDRESS'] = "192.168.1.10"
try:
configProperties['plc_type'] = str(configObj['plc_type'])
print("FYI, using PLC Type from the database {0}".format(configProperties['plc_type']))
except KeyError:
print("FYI, there is no PLC Type stored in the database, defaulting to CLX")
configProperties['plc_type'] = "CLX"
try:
configProperties['scan_rate'] = int(configObj['scan_rate'])
print("FYI, using Scan Rate from the database {0}".format(configProperties['scan_rate']))
except KeyError:
print("FYI, there is no Scan Rate stored in the database, defaulting to 10 seconds")
configProperties['scan_rate'] = 10
try:
sa_test = str(configObj['save_all'])
if sa_test.lower() == "true":
configProperties['save_all'] = True
elif sa_test.lower() == "false":
configProperties['save_all'] = False
else:
configProperties['save_all'] = "test"
print("FYI, value for save_all is {0}".format(configProperties['save_all']))
except KeyError:
print("FYI, there is no save_all value stored in the database, using 'test'")
configProperties['save_all'] = 'test'
for t in tags:
tag_store[t[1]] = Sample(t[1], t[3], t[0], t[5], t[6], t[7], mapFn=t[8], device_type=configProperties['plc_type'], ip_address=configProperties['PLC_IP_ADDRESS'])
while True:
for tag in tag_store:
try:
tag_store[tag].read(configProperties['save_all'])
except:
print("ERROR EVALUATING {}".format(tag))
traceback.print_exc()
time.sleep(configProperties['scan_rate'])
if __name__ == '__main__':
main()

1
tag Submodule

Submodule tag added at a2c3e581fe

97
tagserver_MySQL.py Normal file
View File

@@ -0,0 +1,97 @@
#!/usr/bin/env python
'''
MySQL Tag Server
Created on April 7, 2016
@author: Patrick McDonagh
@description: Continuously loops through a list of tags to store values from a PLC into a MySQL database
'''
import mysql.connector as mysqlcon
import pickle
from tag.tag_mysql import Tag
import traceback
import time
import os

# MySQL connection settings are pickled in the current working directory.
with open(os.path.realpath('.') + '/mysql_cfg.pickle', 'rb') as pickleconfig:
    mysql_cfg = pickle.load(pickleconfig)
if mysql_cfg:
    # NOTE(review): db is only bound when the pickle is truthy; main() raises
    # NameError otherwise -- confirm a missing config should be fatal.
    db = mysqlcon.connect(**mysql_cfg)
tag_store = {}
configProperties = {}


def main():
    """Load tag and config rows, build Tag objects, then poll forever."""
    db.connect()
    cur = db.cursor()
    query = "SELECT * FROM tags WHERE class = 5 AND deleted = 0"
    cur.execute(query)
    tags = cur.fetchall()
    print(tags)  # was a py2-only print statement; call form runs on py2 and py3
    # [(1, u'Century Counter Up', 5, u'Century_Counter_Up', u'REAL', 10.0, 3600, None, 0)]
    db.disconnect()
    configObj = {}
    db.connect()
    cur = db.cursor()
    query = "SELECT parameter, val FROM config GROUP BY parameter;"
    cur.execute(query)
    config = cur.fetchall()
    db.disconnect()
    for x in config:
        configObj[x[0]] = x[1]
    try:
        configProperties['PLC_IP_ADDRESS'] = str(configObj['ip_address'])
        print("FYI, using PLC IP Address from the database {0}".format(configProperties['PLC_IP_ADDRESS']))
    except KeyError:
        print("FYI, there is no PLC IP Address stored in the database, defaulting to 192.168.1.10")
        configProperties['PLC_IP_ADDRESS'] = "192.168.1.10"
    try:
        configProperties['plc_type'] = str(configObj['plc_type'])
        print("FYI, using PLC Type from the database {0}".format(configProperties['plc_type']))
    except KeyError:
        print("FYI, there is no PLC Type stored in the database, defaulting to CLX")
        configProperties['plc_type'] = "CLX"
    try:
        configProperties['scan_rate'] = int(configObj['scan_rate'])
        print("FYI, using Scan Rate from the database {0}".format(configProperties['scan_rate']))
    except KeyError:
        print("FYI, there is no Scan Rate stored in the database, defaulting to 10 seconds")
        configProperties['scan_rate'] = 10
    try:
        sa_test = str(configObj['save_all'])
        if sa_test.lower() == "true":
            configProperties['save_all'] = True
        elif sa_test.lower() == "false":
            configProperties['save_all'] = False
        else:
            # NOTE(review): 'test' is neither True nor False; Tag.read() will
            # treat it as a non-forced read -- confirm intended.
            configProperties['save_all'] = "test"
        print("FYI, value for save_all is {0}".format(configProperties['save_all']))
    except KeyError:
        print("FYI, there is no save_all value stored in the database, using 'test'")
        configProperties['save_all'] = 'test'
    for t in tags:
        # positional fields follow the tags-table schema; see sample row above
        tag_store[t[1]] = Tag(t[1], t[3], t[0], t[5], t[6], t[7], mapFn=t[8], device_type=configProperties['plc_type'], ip_address=configProperties['PLC_IP_ADDRESS'])
    while True:
        for tag in tag_store:
            try:
                tag_store[tag].read(configProperties['save_all'])
            except:
                # best-effort: keep polling the remaining tags on any failure
                print("ERROR EVALUATING {}".format(tag))
                traceback.print_exc()
        time.sleep(configProperties['scan_rate'])


if __name__ == '__main__':
    main()

96
tagserver_SQLite.py Normal file
View File

@@ -0,0 +1,96 @@
#!/usr/bin/env python
'''
Created on Dec 8, 2015
@author: Patrick McDonagh

SQLite tag server: continuously polls PLC tags and stores their values
into a local SQLite database. Counterpart of tagserver_MySQL.py.
'''
import time
import sqlite3 as lite
from tag.tag_sqlite import Tag
import traceback

# con = lite.connect("/usr/db/data.db")
con = lite.connect('/mnt/usb/data.db')
configProperties = {}


def main():
    """Load tag and config rows, build Tag objects, then poll forever."""
    # Read the non-deleted tag list.
    with con:
        cur = con.cursor()
        query = "SELECT * FROM tags WHERE deleted = 0;"
        cur.execute(query)
        tags = cur.fetchall()
    configObj = {}
    # Read the config table into a {parameter: val} dict.
    with con:
        cur = con.cursor()
        query = "SELECT parameter, val FROM config GROUP BY parameter;"
        cur.execute(query)
        config = cur.fetchall()
    for x in config:
        configObj[x[0]] = x[1]
    try:
        configProperties['PLC_IP_ADDRESS'] = str(configObj['ip_address'])
        print("FYI, using PLC IP Address from the database {0}".format(configProperties['PLC_IP_ADDRESS']))
    except KeyError:
        print("FYI, there is no PLC IP Address stored in the database, defaulting to 192.168.1.10")
        configProperties['PLC_IP_ADDRESS'] = "192.168.1.10"
    try:
        configProperties['plc_type'] = str(configObj['plc_type'])
        print("FYI, using PLC Type from the database {0}".format(configProperties['plc_type']))
    except KeyError:
        print("FYI, there is no PLC Type stored in the database, defaulting to CLX")
        configProperties['plc_type'] = "CLX"
    try:
        configProperties['scan_rate'] = int(configObj['scan_rate'])
        print("FYI, using Scan Rate from the database {0}".format(configProperties['scan_rate']))
    except KeyError:
        print("FYI, there is no Scan Rate stored in the database, defaulting to 10 seconds")
        configProperties['scan_rate'] = 10
    try:
        sa_test = str(configObj['save_all'])
        if sa_test.lower() == "true":
            configProperties['save_all'] = True
        elif sa_test.lower() == "false":
            configProperties['save_all'] = False
        else:
            # NOTE(review): 'test' is neither True nor False; Tag.read()
            # treats it as a non-forced read -- confirm intended.
            configProperties['save_all'] = "test"
        print("FYI, value for save_all is {0}".format(configProperties['save_all']))
    except KeyError:
        print("FYI, there is no save_all value stored in the database, using 'test'")
        configProperties['save_all'] = 'test'
    tag_store = {}
    if len(tags) > 0:
        for t in tags:
            # (1, u'Pump Intake Pressure', u'5', u'Pump_Intake_Pressure', u'Pressure at the Intake of the Pump', None, 100.0, 3600, u'PSI', 0.0, 3000.0, u'2016-04-13 21:27:01', 0)
            tag_store[t[1]] = Tag(t[1], t[3], t[0], t[5], t[6], t[7], mapFn=t[8], device_type=configProperties['plc_type'], ip_address=configProperties['PLC_IP_ADDRESS'])
    while True:
        for tag in tag_store:
            try:
                tag_store[tag].read(configProperties['save_all'])
            except:
                # best-effort: keep polling the remaining tags on any failure
                print("ERROR EVALUATING {}".format(tag))
                traceback.print_exc()
        time.sleep(configProperties['scan_rate'])


if __name__ == '__main__':
    main()

101
www/app.coffee Normal file
View File

@@ -0,0 +1,101 @@
express = require('express')
path = require('path')
fs = require('fs')
logger = require('morgan')
methodOverride = require('method-override')
bodyParser = require('body-parser')
errorHandler = require('errorhandler')

app = express()
fns = undefined
# Selects which DB helper module is loaded below: 'MySQL' or 'SQLite'.
app.locals.DB_TYPE = 'MySQL'
###*
* Configuration
###
if app.locals.DB_TYPE == 'MySQL'
  fns = require('./functions_MySQL.coffee')
  mysql = require('mysql')
  # NOTE(review): credentials are hard-coded in source -- consider moving to
  # environment variables or a config file.
  db_config =
    host: 'localhost'
    user: 'website'
    password: 'henrypump'
    database: 'poconsole'
  app.locals.pool = mysql.createPool(db_config)
  # handleDisconnect = ->
  #   console.log 'Handling db disconnect gracefully'
  #   app.locals.db = mysql.createConnection(db_config)
  #   app.locals.db.connect (err) ->
  #     if err
  #       console.log 'error when connecting to db:', err
  #       setTimeout handleDisconnect, 2000
  #     return
  #   app.locals.db.on 'error', (err) ->
  #     console.log 'db error', err
  #     if err.code == 'PROTOCOL_CONNECTION_LOST'
  #       handleDisconnect()
  #     else
  #       throw err
  #     return
  #   return
  # handleDisconnect()
else
  fns = require('./functions_SQLite.coffee')
# NOTE(review): this 'port' setting is unused -- the server below listens on
# the literal s_port = 3000.
app.set 'port', process.env.PORT or 80
app.set 'views', path.join(__dirname, 'views')
app.engine '.html', require('ejs').renderFile
app.set 'view engine', 'html'
#app.use(favicon(__dirname + '/public/img/favicon.ico'));
app.use logger('dev')
app.use methodOverride()
app.use bodyParser.json()
app.use bodyParser.urlencoded(extended: true)
#app.use(express["static"](path.join(__dirname, 'public')));
app.use express.static(__dirname + '/public')
app.use '/bower_components', express.static(__dirname + '/bower_components')
app.use '/node_modules', express.static(__dirname + '/node_modules')
###*
* Routes
###
# Catch-all: render the Angular single-page shell.
angular = (req, res) ->
  res.render 'angularIndex'
  return
app.post '/json/tag/add', fns.createTag # Adds a tag to the scan list
app.post '/json/tag/update/', fns.updateTag # Updates tag data
app.get '/json/tag/delete/:tag', fns.deleteTag # Removes a tag from the scan list
app.get '/json/tag/:id', fns.getTag # Gets a specific tag in the scan list
app.get '/json/tag', fns.getAllTags # Lists all tags in the scan list
app.get '/json/val/:tag', fns.latestValueSingleTag # Gets the latest value of a single tag
app.get '/json/series/:tag/:hours', fns.seriesTagValues # Gets all the values of a tag for the last X hours
app.get '/json/valBetween/:tag/:startDatetime/:endDatetime', fns.seriesTagValuesBetween # Gets the values of a tag between the start time and end time
app.get '/json/CSV/all', fns.allDataCSV # Gets a CSV of all values stored
app.get '/json/CSV/:tag/:startDatetime/:endDatetime', fns.seriesCSVBetween # Gets a CSV of the values of a tag between the start time and end time
app.get '/json/CSV/:tag/:hours', fns.seriesCSV # Gets a CSV of the values of a tag for the last x hours
app.get '/json/all', fns.latestValueAllTags # Gets the latest values of all tags in the scan list
app.get '/json/config', fns.getSetup # Gets the contents of the config table
app.post '/json/config', fns.updateSetup # Adds a new parameter to the config table
app.get '/json/logger/status', fns.checkLoggerStatus # Gets the status of the data logger
app.get '/json/logger/restart', fns.restartLogger # Restarts the data logger
app.get '/json/clearDatabase/all', fns.clearValues # Removes all tag values from the database
app.get '/json/clearDatabase/:id', fns.clearValues # Removes tag values from the database
app.get '*', angular
###*
* Start Server
###
connectionsArray = []
s_port = 3000
server = app.listen(s_port, ->
  host = server.address().address
  port = server.address().port
  console.log 'POConsole listening at http://%s:%s', host, port
  return
)

View File

@@ -1,98 +0,0 @@
var express = require('express'),
path = require('path'),
fs = require('fs'),
logger = require('morgan'),
methodOverride = require('method-override'),
bodyParser = require('body-parser'),
errorHandler = require('errorhandler');
var app = express();
var fns;
app.locals.DB_TYPE = "SQLite"; // or "MySQL"
/**
* Configuration
*/
if (app.locals.DB_TYPE == "MySQL"){
fns = require('./functions_MySQL.js');
var mysql = require('mysql');
var db_config = {
host: 'localhost',
user: 'website',
password: 'henrypump'
};
var handleDisconnect = function () {
console.log("Handling db disconnect gracefully");
app.locals.db = mysql.createConnection(db_config);
app.locals.db.connect(function (err) {
if (err) {
console.log('error when connecting to db:', err);
setTimeout(handleDisconnect, 2000);
}
});
app.locals.db.on('error', function (err) {
console.log('db error', err);
if (err.code === 'PROTOCOL_CONNECTION_LOST') {
handleDisconnect();
} else {
throw err;
}
});
};
handleDisconnect();
} else {
fns = require('./functions_SQLite.js');
}
app.set('port', process.env.PORT || 80);
app.set('views', path.join(__dirname, 'views'));
app.engine('.html', require('ejs').renderFile);
app.set('view engine', 'html');
//app.use(favicon(__dirname + '/public/img/favicon.ico'));
app.use(logger('dev'));
app.use(methodOverride());
app.use(bodyParser.json());
app.use(bodyParser.urlencoded({extended: true}));
//app.use(express["static"](path.join(__dirname, 'public')));
app.use(express.static(__dirname + '/public'));
app.use('/bower_components', express.static(__dirname + '/bower_components'));
app.use('/node_modules', express.static(__dirname + '/node_modules'));
/**
* Routes
*/
var angular = function(req, res) {
res.render('angularIndex');
};
app.post('/json/tag/add', fns.createTag); // Adds a tag to the scan list
app.post('/json/tag/update/', fns.updateTag); // Updates tag data
app.get('/json/tag/delete/:tag', fns.deleteTag); // Removes a tag from the scan list
app.get('/json/tag/:id', fns.getTag); // Gets a specific tag in the scan list
app.get('/json/tag', fns.getAllTags); // Lists all tags in the scan list
app.get('/json/val/:tag', fns.latestValueSingleTag); // Gets the latest value of a single tag
app.get('/json/series/:tag/:hours', fns.seriesTagValues); // Gets all the values of a tag for the last X hours
app.get('/json/valBetween/:tag/:startDatetime/:endDatetime', fns.seriesTagValuesBetween); // Gets the values of a tag between the start time and end time
app.get('/json/CSV/all', fns.allDataCSV); // Gets a CSV of all values stored
app.get('/json/CSV/:tag/:startDatetime/:endDatetime', fns.seriesCSVBetween); // Gets a CSV of the values of a tag between the start time and end time
app.get('/json/CSV/:tag/:hours', fns.seriesCSV); // Gets a CSV of the values of a tag for the last x hours
app.get('/json/all', fns.latestValueAllTags); // Gets the latest values of all tags in the scan list
app.get('/json/config', fns.getSetup); // Gets the contents of the config table
app.post('/json/config', fns.updateSetup); // Adds a new parameter to the config table
app.get('/json/logger/status', fns.checkLoggerStatus); // Gets the status of the data logger
app.get('/json/logger/restart', fns.restartLogger); // Restarts the data logger
app.get('*', angular);
/**
* Start Server
*/
connectionsArray = [];
s_port = 3000;
var server = app.listen(s_port, function () {
var host = server.address().address;
var port = server.address().port;
console.log('POConsole listening at http://%s:%s', host, port);
});

View File

@@ -1,18 +1,87 @@
CREATE DATABASE TagData;
CREATE TABLE `TagData`.`tags` (
`id` INT NOT NULL AUTO_INCREMENT,
`tagName` VARCHAR(128) NULL,
`dateAdded` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
`units` VARCHAR(16) NULL,
`deleted` INT NULL DEFAULT 0,
PRIMARY KEY (`id`));
CREATE DATABASE poconsole;
USE poconsole;
CREATE TABLE IF NOT EXISTS tag_classes(
id INT NOT NULL AUTO_INCREMENT,
tag_class varchar(64),
description varchar(64),
PRIMARY KEY (id)
);
CREATE TABLE IF NOT EXISTS device_types(
id INT NOT NULL AUTO_INCREMENT,
dType VARCHAR(64),
PRIMARY KEY (id)
);
CREATE TABLE IF NOT EXISTS devices(
id INT NOT NULL AUTO_INCREMENT,
name varchar(64),
device_type INT,
address VARCHAR(64),
PRIMARY KEY (id),
INDEX device_type_ind (device_type),
FOREIGN KEY (device_type)
REFERENCES device_types(id)
);
CREATE TABLE IF NOT EXISTS tags(
id INT NOT NULL AUTO_INCREMENT,
name varchar(128),
class INT,
tag varchar(128),
deviceID INT,
description varchar(128),
data_type varchar(32),
change_threshold float,
guarantee_sec INT,
map_function varchar(64),
units varchar(64),
minExpected varchar(64),
maxExpected varchar(64),
deleted INT NULL DEFAULT 0,
PRIMARY KEY (id),
INDEX class_ind (class),
FOREIGN KEY (class)
REFERENCES tag_classes(id)
ON DELETE CASCADE,
INDEX deviceID_ind (deviceID),
FOREIGN KEY (deviceID)
REFERENCES devices(id)
ON DELETE CASCADE
);
CREATE TABLE IF NOT EXISTS tag_vals(
id INT NOT NULL AUTO_INCREMENT,
dtime datetime,
tagID int,
val float,
PRIMARY KEY (id),
INDEX tagID_ind (tagID),
FOREIGN KEY (tagID)
REFERENCES tags(id)
ON DELETE CASCADE
);
CREATE TABLE IF NOT EXISTS config (
id INT NOT NULL AUTO_INCREMENT,
parameter varchar(128),
val varchar(128),
dateAdded TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP,
PRIMARY KEY (id)
);
INSERT INTO poconsole.tag_classes (id, tag_class, description) VALUES (1, 'stroke', 'Stroke Information');
INSERT INTO poconsole.tag_classes (id, tag_class, description) VALUES (2, 'history', 'Historical Data');
INSERT INTO poconsole.tag_classes (id, tag_class, description) VALUES (3, 'gaugeoff', 'Gauge Off Data');
INSERT INTO poconsole.tag_classes (id, tag_class, description) VALUES (4, 'welltest', 'Well Test Data');
INSERT INTO poconsole.tag_classes (id, tag_class, description) VALUES (5, 'custom', 'Custom tags');
INSERT INTO poconsole.device_types (id, dType) VALUES (1, "CLX");
INSERT INTO poconsole.device_types (id, dType) VALUES (2, "Micro800");
INSERT INTO poconsole.device_types (id, dType) VALUES (3, "E300");
-- INSERT INTO poconsole.device_types (id, dType) VALUES (4, "PF755");
CREATE TABLE `TagData`.`values` (
`id` INT NOT NULL AUTO_INCREMENT,
`tagID` INT NULL,
`val` FLOAT NULL,
`dateAdded` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
PRIMARY KEY (`id`));
CREATE USER 'website'@'localhost' IDENTIFIED BY 'henrypump';
GRANT ALL ON *.* TO 'website'@'localhost';
@@ -20,4 +89,4 @@ CREATE USER 'admin'@'localhost' IDENTIFIED BY 'henrypump';
GRANT ALL ON *.* to 'admin'@'localhost';
CREATE USER 'admin'@'%' IDENTIFIED BY 'henrypump';
GRANT ALL ON *.* to 'admin'@'%';
FLUSH PRIVILEGES;
FLUSH PRIVILEGES;

View File

@@ -1,8 +1,19 @@
CREATE TABLE IF NOT EXISTS tag_classes(
id INTEGER PRIMARY KEY,
tag_class TEXT,
description TEXT
);
CREATE TABLE IF NOT EXISTS tags (
id INTEGER PRIMARY KEY,
tagName TEXT,
vanityName TEXT,
name TEXT,
class TEXT,
tag TEXT,
description TEXT,
data_type TEXT,
change_threshold REAL,
guarantee_sec INTEGER,
map_function TEXT,
units TEXT,
minExpected REAL,
maxExpected REAL,
@@ -10,11 +21,11 @@ CREATE TABLE IF NOT EXISTS tags (
deleted INTEGER DEFAULT 0
);
CREATE TABLE IF NOT EXISTS vals (
CREATE TABLE IF NOT EXISTS tag_vals (
id INTEGER PRIMARY KEY,
tagID INTEGER,
val REAL,
dateAdded TIMESTAMP DEFAULT CURRENT_TIMESTAMP
dtime TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);
CREATE TABLE IF NOT EXISTS config (
@@ -23,3 +34,9 @@ CREATE TABLE IF NOT EXISTS config (
val TEXT,
dateAdded TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);
INSERT INTO tag_classes (id, tag_class, description) VALUES (1, 'stroke', 'Stroke Information');
INSERT INTO tag_classes (id, tag_class, description) VALUES (2, 'history', 'Historical Data');
INSERT INTO tag_classes (id, tag_class, description) VALUES (3, 'gaugeoff', 'Gauge Off Data');
INSERT INTO tag_classes (id, tag_class, description) VALUES (4, 'welltest', 'Well Test Data');
INSERT INTO tag_classes (id, tag_class, description) VALUES (5, 'custom', 'Custom tags');

498
www/functions_MySQL.coffee Normal file
View File

@@ -0,0 +1,498 @@
# var dbFile = "/usr/db/data.db";
# NOTE(review): dbFile is a leftover from the SQLite helper module and is
# never referenced in this MySQL version -- confirm before removing.
dbFile = '/mnt/usb/data.db'
# var dbFile = '/Users/patrickjmcd/data.db';
dString_to_sqlite = (dString) ->
  ###*
  * Converts a date string in the form YYYYMMDD_HHmmSS to SQLite format
  * (YYYY-MM-DD HH:mm:SS); yields null when the input does not match.
  * @param {String} dString
  * @return {String} sqliteString
  ###
  parts = /(\d{4})(\d{2})(\d{2})_(\d{2})(\d{2})(\d{2})/.exec(dString)
  if parts
    "#{parts[1]}-#{parts[2]}-#{parts[3]} #{parts[4]}:#{parts[5]}:#{parts[6]}"
  else
    null
sqlite_to_dString = (sqliteDate) ->
  ###*
  * Converts a sqlite date string (YYYY-MM-DD HH:mm:SS) to the compact
  * YYYYMMDD_HHmmSS form; yields null when the input does not match.
  * @param {String} sqliteDate
  * @return {String} dString
  ###
  m = /(\d{4})-(\d{2})-(\d{2}) (\d{2}):(\d{2}):(\d{2})/.exec(sqliteDate)
  if m
    "#{m[1]}#{m[2]}#{m[3]}_#{m[4]}#{m[5]}#{m[6]}"
  else
    null
# Internal helper: fetches every non-deleted tag row and passes it to
# callback(err, rows) in Node style.
getAllTags = (pool, callback) ->
  pool.getConnection (err, db)->
    query = 'SELECT * FROM tags WHERE deleted = 0'
    db.query query, (err, rows, fields) ->
      db.release()
      if err
        # fixed: console.log used to sit AFTER the return and never ran
        console.log err
        return callback(err, null)
      else
        return callback(null, rows)
  undefined
# GET /json/tag -- lists every non-deleted tag.
exports.getAllTags = (req, res) ->
  req.app.locals.pool.getConnection (err, db) ->
    query = 'SELECT * FROM tags WHERE deleted = 0'
    db.query query, (err, rows, fields) ->
      # fixed: the pooled connection was never released -> pool exhaustion
      db.release()
      if err
        res.json
          status: 'error'
          message: err
        console.log err
      else
        res.json
          status: 'OK'
          tags: rows
  undefined
# POST /json/tag/add -- inserts a new class-5 (custom) tag.
exports.createTag = (req, res) ->
  req.app.locals.pool.getConnection (err, db) ->
    query = 'INSERT INTO tags (tag, units, minExpected, maxExpected, name, description, class, guarantee_sec, change_threshold, data_type) VALUES (?, ?, ?, ?, ?, ?, 5, ?, ?, ?)'
    db.query query, [req.body.tag, req.body.units, req.body.minExpected, req.body.maxExpected, req.body.name, req.body.description, req.body.guarantee_sec, req.body.change_threshold, req.body.data_type], (err, results) ->
      # fixed: the pooled connection was never released -> pool exhaustion
      db.release()
      if err
        res.json
          status: 'error'
          message: err
        console.log err
      else
        res.json status: 'OK'
  undefined
# GET /json/tag/:id -- returns the tag row with the given id.
exports.getTag = (req, res) ->
  req.app.locals.pool.getConnection (err, db) ->
    query = 'SELECT * FROM tags WHERE id = ?'
    db.query query, [req.params.id], (err, rows) ->
      db.release()
      if err
        res.json
          status: 'error'
          message: err
        console.log err
      else
        res.json
          status: 'OK'
          tags: rows
  undefined
# POST /json/tag/update/ -- updates an existing tag row by req.body.id.
exports.updateTag = (req, res) ->
  console.log(req.body)
  req.app.locals.pool.getConnection (err, db) ->
    query = 'UPDATE tags set tag = ?, units = ?, minExpected = ?, maxExpected = ?, name = ?, description = ?, guarantee_sec = ?, change_threshold = ?, data_type = ? WHERE id = ?'
    db.query query, [req.body.tag, req.body.units, req.body.minExpected, req.body.maxExpected, req.body.name, req.body.description, req.body.guarantee_sec, req.body.change_threshold, req.body.data_type, req.body.id], (err, results) ->
      db.release()
      if err
        res.json
          status: 'error'
          message: err
        console.log err
      else
        res.json
          status: 'OK'
  undefined
# GET /json/tag/delete/:tag -- soft-deletes a tag (sets deleted = 1).
exports.deleteTag = (req, res) ->
  req.app.locals.pool.getConnection (err, db) ->
    query = 'UPDATE tags SET deleted = 1 WHERE id = ?'
    db.query query, [req.params.tag], (err, results) ->
      db.release()
      if err
        res.json
          status: 'error'
          message: err
        console.log err
      else
        res.json
          status: 'OK'
  undefined
# GET /json/series/:tag/:hours -- values of one tag for the last :hours hours.
exports.seriesTagValues = (req, res) ->
  req.app.locals.pool.getConnection (err, db) ->
    # fixed: the interval was hard-coded to 1 HOUR (ignoring :hours) and
    # filtered on dateAdded, but tag_vals' timestamp column is dtime
    query = 'SELECT * FROM tag_vals WHERE tagID = ? AND dtime >= DATE_SUB(NOW(), INTERVAL ? HOUR)'
    db.query query, [parseInt(req.params.tag), parseInt(req.params.hours)], (err, rows) ->
      db.release()
      if err
        console.log err
        res.json
          status: 'error'
          message: err
          query: query
      else
        res.json
          status: 'OK'
          tag: req.params.tag
          vals: rows
  undefined
# GET /json/valBetween/:tag/:startDatetime/:endDatetime -- values of one tag
# between two YYYYMMDD_HHmmSS timestamps (converted via dString_to_sqlite).
exports.seriesTagValuesBetween = (req, res) ->
  req.app.locals.pool.getConnection (err, db) ->
    query = 'SELECT * FROM tag_vals WHERE tagID = ? AND dtime >= ? AND dtime <= ?'
    db.query query, [parseInt(req.params.tag), dString_to_sqlite(req.params.startDatetime), dString_to_sqlite(req.params.endDatetime)], (err, rows) ->
      db.release()
      if err
        console.log err
        res.json
          status: 'error'
          message: err
          query: query
      else
        res.json
          status: 'OK'
          tag: req.params.tag
          startDatetime: dString_to_sqlite(req.params.startDatetime)
          endDatetime: dString_to_sqlite(req.params.endDatetime)
          vals: rows
  undefined
# Builds one CR-terminated CSV line for dataRow, placing its value in the
# column whose header matches dataRow.name. Returns '' when the name is not
# in header (previously fell through to undefined, which the caller then
# concatenated into the CSV as the text "undefined").
createCSVrow = (header, dataRow) ->
  i = header.indexOf(dataRow.name)
  csvRow = dataRow.id.toString() + ',' + dataRow.dtime + ','
  if i >= 0
    j = 2
    while j < header.length
      if j == i
        csvRow = csvRow + dataRow.val.toString() + ','
      else
        csvRow = csvRow + ','
      j++
    csvRow = csvRow.slice(0, -1) + '\u000d'
    return csvRow
  ''
# GET /json/CSV/all -- streams every stored value as a CSV download with one
# column per tag (header built from the tag list, rows via createCSVrow).
exports.allDataCSV = (req, res) ->
  req.app.locals.pool.getConnection (err, db) ->
    query = 'SELECT v.id, t.name, v.val, v.dtime FROM tags t JOIN tag_vals v ON t.id = v.tagID'
    db.query query, (err, rows) ->
      db.release()
      if err
        console.log err
        res.json
          status: 'error'
          message: err
          query: query
      else
        getAllTags req.app.locals.pool, (err, tags) ->
          if err
            console.log err
          else
            csvString = ''
            # fixed header columns, followed by one column per tag name
            da = [
              'id'
              'DateAdded'
            ]
            tagVanityNames = tags.map((t) ->
              t.name
            )
            h = da.concat(tagVanityNames)
            console.log h
            csvString = csvString + h.join(',') + '\u000d'
            i = 0
            while i < rows.length
              csvString = csvString + createCSVrow(h, rows[i])
              i++
            res.set 'Content-Type', 'text/csv'
            res.set 'Content-Disposition', 'attachment;filename=tagdata.csv'
            res.send csvString
  undefined
# GET /json/CSV/:tag/:hours -- CSV download of one tag's values for the last
# :hours hours.
exports.seriesCSV = (req, res) ->
  req.app.locals.pool.getConnection (err, db) ->
    # fixed: query used SQLite DATETIME() syntax against MySQL, selected the
    # nonexistent dateAdded column (tag_vals uses dtime), and ignored :hours
    query = 'SELECT v.id, t.name, v.val, v.dtime FROM tags t JOIN tag_vals v ON t.id = v.tagID WHERE tagID = ? AND v.dtime >= DATE_SUB(NOW(), INTERVAL ? HOUR)'
    db.query query, [parseInt(req.params.tag), parseInt(req.params.hours)], (err, rows) ->
      db.release()
      if err
        console.log err
        res.json
          status: 'error'
          message: err
          query: query
      else if rows.length == 0
        # fixed: rows[0] dereference crashed on an empty result set
        res.set 'Content-Type', 'text/csv'
        res.set 'Content-Disposition', 'attachment;filename=tagdata.csv'
        res.send ''
      else
        csvString = ''
        # fixed: header used rows[0].vanityName, which this query never selects
        h = [
          'id'
          'DateAdded'
          rows[0].name
        ]
        csvString = csvString + h.join(',') + '\u000d'
        i = 0
        while i < rows.length
          csvString = csvString + [
            rows[i].id
            rows[i].dtime
            rows[i].val
          ].join(',') + '\u000d'
          i++
        res.set 'Content-Type', 'text/csv'
        res.set 'Content-Disposition', 'attachment;filename=tagdata.csv'
        res.send csvString
  undefined
# GET /json/CSV/:tag/:startDatetime/:endDatetime -- CSV download of one tag's
# values between two YYYYMMDD_HHmmSS timestamps.
exports.seriesCSVBetween = (req, res) ->
  req.app.locals.pool.getConnection (err, db) ->
    query = 'SELECT v.id, t.name, v.val, v.dtime FROM tags t JOIN tag_vals v ON t.id = v.tagID WHERE tagID = ? AND v.dtime >= ? AND v.dtime <= ?'
    db.query query, [parseInt(req.params.tag), dString_to_sqlite(req.params.startDatetime), dString_to_sqlite(req.params.endDatetime)], (err, rows) ->
      db.release()
      if err
        console.log err
        res.json
          status: 'error'
          message: err
          query: query
      else if rows.length == 0
        # fixed: rows[0].name crashed on an empty result set
        res.set 'Content-Type', 'text/csv'
        res.set 'Content-Disposition', 'attachment;filename=tagdata.csv'
        res.send ''
      else
        csvString = ''
        h = [
          'id'
          'DateAdded'
          rows[0].name
        ]
        csvString = csvString + h.join(',') + '\u000d'
        i = 0
        while i < rows.length
          csvString = csvString + [
            rows[i].id
            rows[i].dtime
            rows[i].val
          ].join(',') + '\u000d'
          i++
        res.set 'Content-Type', 'text/csv'
        res.set 'Content-Disposition', 'attachment;filename=tagdata.csv'
        res.send csvString
  undefined
# GET /json/val/:tag -- returns the newest stored value for one tag.
exports.latestValueSingleTag = (req, res) ->
  req.app.locals.pool.getConnection (err, db) ->
    query = 'SELECT * FROM tag_vals WHERE id = (SELECT MAX(id) FROM tag_vals WHERE tagID = ?)'
    db.query query, [req.params.tag], (err, rows) ->
      db.release()
      console.log rows
      if err
        res.json
          status: 'error'
          message: err
        console.log err
      else
        # rows[0] is undefined when the tag has no values yet
        res.json
          status: 'OK'
          tag_val: rows[0]
  undefined
# GET /json/all -- returns the newest stored value of every tag, joined with
# the tag metadata.
exports.latestValueAllTags = (req, res) ->
  req.app.locals.pool.getConnection (err, db) ->
    query = 'SELECT v1.id as id, v1.dtime as dtime, t.id as t_id, t.name as name, t.tag as tag, v1.val as val, t.units as units, t.description as description, t.minExpected as minExpected, t.maxExpected as maxExpected FROM tag_vals v1 LEFT JOIN tags t ON t.id = v1.tagID WHERE v1.id = (SELECT v2.id FROM tag_vals v2 WHERE v2.tagID = v1.tagID ORDER BY v2.id DESC LIMIT 1)'
    # fixed: a [req.params.id] bind array was passed although the query has no
    # placeholders (and this route has no :id param)
    db.query query, (err, rows) ->
      db.release()
      if err
        res.json
          status: 'error'
          message: err
        console.log err
      else
        res.json
          status: 'OK'
          vals: rows
  undefined
# GET /json/logger/status -- reads the logger PID file and reports whether
# that process is alive (via the is-running package).
exports.checkLoggerStatus = (req, res) ->
  fs = require('fs')
  # var ps = require("ps-node");
  running = require('is-running')
  fs.readFile '/root/tagserver.pid', (derr, ddata) ->
    if derr
      console.log 'Problem getting PID of tagserver'
      res.json
        status: 'error'
        message: 'Problem getting PID of tagserver'
    else
      # assumes the PID file content is acceptable input to is-running -- TODO confirm
      res.json
        status: 'OK'
        running: running(ddata)
    return
  return
# GET /json/logger/restart -- starts the logger init script and reports the
# exec result.
exports.restartLogger = (req, res) ->
  exec = require('child_process').exec
  exec '/etc/init.d/loggers start', (error, stdout, stderr) ->
    if error
      res.json
        status: 'error'
        message: error
    else
      res.json
        status: 'OK'
    return
  return
# GET /json/config -- returns one row per config parameter.
exports.getSetup = (req, res) ->
  req.app.locals.pool.getConnection (err, db) ->
    query = 'SELECT parameter, val, dateAdded FROM config GROUP BY parameter;'
    # fixed: a [req.params.id] bind array was passed although the query has no
    # placeholders (and this route has no :id param)
    db.query query, (err, rows) ->
      db.release()
      if err
        res.json
          status: 'error'
          message: err
        console.log err
      else
        res.json
          status: 'OK'
          config: rows
  undefined
# POST /json/config -- appends a config row, then bounces the logger daemons
# (stop now, start again 5 s later) so they pick up the new setting.
exports.updateSetup = (req, res) ->
  exec = require('child_process').exec
  req.app.locals.pool.getConnection (err, db) ->
    query = 'INSERT INTO config (parameter, val) VALUES (?, ?)';
    db.query query, [req.body.parameter, req.body.val], (err) ->
      db.release()
      if err
        console.log runErr: err
        res.json
          status: 'error'
          message: err
          query: query
      else
        res.redirect '/#/setup'
        exec '/etc/init.d/loggers stop', (error, stdout, stderr) ->
          if error
            console.log
              status: 'error'
              message: error
              query: query
        # restart after a fixed 5 s delay; presumably enough time for the
        # stop script to finish -- TODO confirm
        setTimeout (->
          exec '/etc/init.d/loggers start', (error, stdout, stderr) ->
            if error
              console.log
                status: 'error'
                message: error
                query: query
        ), 5000
  undefined
# GET /json/clearDatabase/:id and /json/clearDatabase/all -- deletes stored
# values for one tag (when :id is present) or for every tag.
exports.clearValues = (req, res) ->
  if req.params.id
    req.app.locals.pool.getConnection (err, db) ->
      query = 'DELETE FROM tag_vals WHERE tagID = ?;';
      db.query query, [req.params.id], (err) ->
        db.release()
        if err
          res.json
            status: 'error'
            message: err
          console.log err
        else
          res.json
            status: 'OK'
  else
    req.app.locals.pool.getConnection (err, db) ->
      query = 'DELETE FROM tag_vals WHERE id >= 0;';
      db.query query, (err) ->
        db.release()
        if err
          res.json
            status: 'error'
            message: err
          console.log err
        else
          res.json
            status: 'OK'
  undefined
# Lists every device joined with its device-type name.
exports.getAllDevices = (req, res) ->
  req.app.locals.pool.getConnection (err, db) ->
    query = 'SELECT d.id, d.name, d.address, t.dType as device_type FROM devices d JOIN device_types t ON d.device_type = t.id;'
    # fixed: missing comma -- `db.query query (err...) ->` parsed as
    # db.query(query(callback)) and invoked the SQL string as a function
    db.query query, (err, rows, fields) ->
      db.release()
      if err
        res.json
          status: 'error'
          message: err
        console.log err
      else
        res.json
          status: 'OK'
          devices: rows
  undefined
# Inserts a new device row.
exports.createDevice = (req, res) ->
  req.app.locals.pool.getConnection (err, db) ->
    query = 'INSERT INTO devices (name, device_type, address) VALUES (?, ?, ?)'
    db.query query, [req.body.name, req.body.device_type, req.body.address,], (err, results) ->
      # fixed: the pooled connection was never released -> pool exhaustion
      db.release()
      if err
        res.json
          status: 'error'
          message: err
        console.log err
      else
        res.json status: 'OK'
  undefined
# Returns one device (joined with its device-type name) by id.
exports.getDevice = (req, res) ->
  req.app.locals.pool.getConnection (err, db) ->
    query = 'SELECT d.id, d.name, d.address, t.dType as device_type FROM devices d JOIN device_types t ON d.device_type = t.id WHERE d.id = ?'
    db.query query, [req.params.id], (err, rows) ->
      db.release()
      if err
        res.json
          status: 'error'
          message: err
        console.log err
      else
        res.json
          status: 'OK'
          devices: rows
  undefined
# Updates an existing device row by req.body.id.
exports.updateDevice = (req, res) ->
  console.log(req.body)
  req.app.locals.pool.getConnection (err, db) ->
    # fixed: statement updated the `tags` table (which has no device_type
    # column) instead of `devices`
    query = 'UPDATE devices set name = ?, device_type = ?, address = ? WHERE id = ?'
    db.query query, [req.body.name, req.body.device_type, req.body.address, req.body.id], (err, results) ->
      db.release()
      if err
        res.json
          status: 'error'
          message: err
        console.log err
      else
        res.json
          status: 'OK'
  undefined
# Hard-deletes a device row by id (note: tags reference devices with
# ON DELETE CASCADE, so their tags are removed too).
exports.deleteDevice = (req, res) ->
  req.app.locals.pool.getConnection (err, db) ->
    query = 'DELETE FROM devices WHERE id = ?'
    db.query query, [req.params.id], (err, results) ->
      db.release()
      if err
        res.json
          status: 'error'
          message: err
        console.log err
      else
        res.json
          status: 'OK'
  undefined

View File

@@ -1,80 +0,0 @@
// app.get('/json/add/:tag', fns.addTag); // Adds a tag to the scan list
// app.get('/json/remove/:tag', fns.removeTag); // Removes a tag from the scan list
// app.get('/json/val/:tag', fns.latestTagValue); // Gets the latest value of a single tag
// app.get('/json/series/:tag/:hours', fns.seriesTagValues); // Gets all the values of a tag for the last X hours
// app.get('/json/tags', fns.allTags); // Lists all tags in the scan list
// app.get('/json/all', fns.allValues); // Gets the latest values of all tags in the scan list
var getScanList = function(sl){
var query = "SELECT * FROM TagData.tags WHERE deleted = 0;";
};
exports.addTag = function(req, res){
var vals = {
tagName: req.params.tagName,
units: req.params.units,
};
var query = "INSERT INTO TagData.tags SET ?";
req.app.locals.db.query(query, vals, function(err, rows, fields) {
if (err) {
res.json({status:"error", message:err});
console.log(err);
} else {
res.json({status:"OK"});
}
});
};
exports.removeTag = function(req, res){
var query = "UPDATE TagData.tags SET deleted = 1 WHERE id = " + parseInt(req.params.tag) + ";";
req.app.locals.db.query(query, function(err, rows, fields) {
if (err) {
res.json({status:"error", message:err});
console.log(err);
} else {
res.json({status:"OK"});
}
});
};
exports.latestTagValue = function(req, res){
var query = "SELECT * FROM TagData.values WHERE id = (SELECT MAX(id) FROM TagData.values WHERE tagID = (SELECT id FROM TagData.tags WHERE tagName = '" + req.params.tag + "'));";
req.app.locals.db.query(query, function(err, rows, fields) {
if (err) {
res.json({status:"error", message:err});
console.log(err);
} else {
res.json({status:"OK", tag_val:rows[0]});
}
});
};
exports.seriesTagValues = function(req, res){
var query = "SELECT * FROM TagData.values WHERE tagID = "+ req.params.tag +" AND dateAdded > DATE_SUB(NOW(),INTERVAL "+ req.params.hours +" HOUR)";
req.app.locals.db.query(query, function(err, rows, fields) {
if (err){
console.log(err);
res.json({status:"error", message:err, query:query});
} else {
res.json({status:"OK", tag: req.params.tag, values:rows});
}
});
};
exports.allTags = function(req, res){
var query = "SELECT * FROM TagData.tags WHERE deleted = 0;";
req.app.locals.db.query(query, function(err, rows, fields) {
if (err) {
res.json({status:"error", message:err});
console.log(err);
} else {
res.json({status:"OK", tags:rows});
}
});
};
exports.allValues = function(req, res){
res.json(status:"error", message:"not implemented"});
};

517
www/functions_SQLite.coffee Normal file
View File

@@ -0,0 +1,517 @@
# var dbFile = "/usr/db/data.db";
dbFile = '/mnt/usb/data.db'
# var dbFile = '/Users/patrickjmcd/data.db';
# Convert a compact timestamp "YYYYMMDD_HHmmSS" into SQLite's
# "YYYY-MM-DD HH:mm:SS" form. Returns null when the input does not
# contain the expected pattern.
dString_to_sqlite = (dString) ->
  pattern = /(\d{4})(\d{2})(\d{2})_(\d{2})(\d{2})(\d{2})/
  parts = pattern.exec dString
  return null unless parts
  "#{parts[1]}-#{parts[2]}-#{parts[3]} #{parts[4]}:#{parts[5]}:#{parts[6]}"
# Convert a SQLite timestamp "YYYY-MM-DD HH:mm:SS" into the compact
# "YYYYMMDD_HHmmSS" form used in URLs. Returns null on no match.
sqlite_to_dString = (sqliteDate) ->
  pattern = /(\d{4})-(\d{2})-(\d{2}) (\d{2}):(\d{2}):(\d{2})/
  parts = pattern.exec sqliteDate
  if parts
    "#{parts[1]}#{parts[2]}#{parts[3]}_#{parts[4]}#{parts[5]}#{parts[6]}"
  else
    null
# Internal helper: fetch every non-deleted tag row and pass it to
# callback(err, rows). Opens and closes its own database handle.
getAllTags = (callback) ->
  sqlite3 = require('sqlite3').verbose()
  db = new (sqlite3.Database)(dbFile)
  db.serialize ->
    query = 'SELECT * FROM tags WHERE deleted = 0'
    prepQuery = db.prepare(query)
    prepQuery.all (err, rows) ->
      prepQuery.finalize()
      db.close()
      if err
        # BUG FIX: log BEFORE returning — the original logged after
        # `return callback(...)`, which was unreachable.
        console.log err
        return callback(err, null)
      else
        return callback(null, rows)
    return
  return
# GET handler: respond with every tag row that has not been soft-deleted.
exports.getAllTags = (req, res) ->
  sqlite3 = require('sqlite3').verbose()
  db = new (sqlite3.Database)(dbFile)
  db.serialize ->
    stmt = db.prepare 'SELECT * FROM tags WHERE deleted = 0'
    stmt.all (queryErr, rows) ->
      stmt.finalize()
      db.close()
      if queryErr
        res.json {status: 'error', message: queryErr}
        console.log queryErr
      else
        res.json {status: 'OK', tags: rows}
      return
    return
  return
# POST handler: create a tag row from req.body.
# Note the `class` column is hard-wired to 5 in the statement.
exports.createTag = (req, res) ->
  sqlite3 = require('sqlite3').verbose()
  db = new (sqlite3.Database)(dbFile)
  db.serialize ->
    stmt = db.prepare 'INSERT INTO tags (tag, units, minExpected, maxExpected, name, description, class, guarantee_sec, change_threshold, data_type) VALUES (?, ?, ?, ?, ?, ?, 5, ?, ?, ?)'
    b = req.body
    stmt.run b.tag, b.units, b.minExpected, b.maxExpected, b.name, b.description, b.guarantee_sec, b.change_threshold, b.data_type, (insertErr) ->
      stmt.finalize()
      db.close()
      if insertErr
        res.json {status: 'error', message: insertErr}
        console.log insertErr
      else
        res.json status: 'OK'
      return
    return
  return
# GET handler: fetch one tag row by URL :id (responds with a list).
exports.getTag = (req, res) ->
  sqlite3 = require('sqlite3').verbose()
  db = new (sqlite3.Database)(dbFile)
  db.serialize ->
    stmt = db.prepare 'SELECT * FROM tags WHERE id = ?'
    stmt.all req.params.id, (queryErr, rows) ->
      stmt.finalize()
      db.close()
      if queryErr
        res.json {status: 'error', message: queryErr}
        console.log queryErr
      else
        res.json {status: 'OK', tags: rows}
      return
    return
  return
# POST handler: update every editable column of the tag row req.body.id.
exports.updateTag = (req, res) ->
  sqlite3 = require('sqlite3').verbose()
  db = new (sqlite3.Database)(dbFile)
  db.serialize ->
    stmt = db.prepare 'UPDATE tags set tag = ?, units = ?, minExpected = ?, maxExpected = ?, name = ?, description = ?, guarantee_sec = ?, change_threshold = ?, data_type = ? WHERE id = ?'
    b = req.body
    stmt.run b.tag, b.units, b.minExpected, b.maxExpected, b.name, b.description, b.guarantee_sec, b.change_threshold, b.data_type, b.id, (updateErr) ->
      stmt.finalize()
      db.close()
      if updateErr
        res.json {status: 'error', message: updateErr}
        console.log updateErr
      else
        res.json {status: 'OK'}
      return
    return
  return
# Soft-delete: mark the tag deleted rather than removing the row, so
# historical tag_vals keep a valid parent.
exports.deleteTag = (req, res) ->
  sqlite3 = require('sqlite3').verbose()
  db = new (sqlite3.Database)(dbFile)
  db.serialize ->
    stmt = db.prepare 'UPDATE tags SET deleted = 1 WHERE id = ?'
    # NOTE(review): this handler reads req.params.tag while getTag reads
    # req.params.id — confirm the route definitions really differ.
    stmt.run req.params.tag, (updateErr) ->
      stmt.finalize()
      db.close()
      if updateErr
        res.json {status: 'error', message: updateErr}
        console.log updateErr
      else
        res.json {status: 'OK'}
      return
    return
  return
# GET handler: every value of one tag from the last hour.
exports.seriesTagValues = (req, res) ->
  sqlite3 = require('sqlite3').verbose()
  db = new (sqlite3.Database)(dbFile)
  db.serialize ->
    query = "SELECT * FROM tag_vals WHERE tagID = ? AND dtime > DATETIME('now', '-1 HOUR')"
    stmt = db.prepare query
    stmt.all parseInt(req.params.tag), (queryErr, rows) ->
      stmt.finalize()
      db.close()
      unless queryErr
        res.json {status: 'OK', tag: req.params.tag, vals: rows}
      else
        console.log queryErr
        res.json {status: 'error', message: queryErr, query: query}
      return
    return
  return
# GET handler: values of one tag between :startDatetime and :endDatetime
# (URL params in compact YYYYMMDD_HHmmSS form, converted for SQLite).
exports.seriesTagValuesBetween = (req, res) ->
  sqlite3 = require('sqlite3').verbose()
  db = new (sqlite3.Database)(dbFile)
  startSql = dString_to_sqlite(req.params.startDatetime)
  endSql = dString_to_sqlite(req.params.endDatetime)
  db.serialize ->
    query = 'SELECT * FROM tag_vals WHERE tagID = ? AND dtime >= DATETIME(?) AND dtime <= DATETIME(?)'
    stmt = db.prepare query
    stmt.all parseInt(req.params.tag), startSql, endSql, (queryErr, rows) ->
      stmt.finalize()
      db.close()
      unless queryErr
        res.json
          status: 'OK'
          tag: req.params.tag
          startDatetime: startSql
          endDatetime: endSql
          vals: rows
      else
        console.log queryErr
        res.json {status: 'error', message: queryErr, query: query}
      return
    return
  return
# Build one CSV data row: "id,dtime," followed by one column per tag name
# in `header` (header[0..1] are the fixed 'id'/'dtime' columns). The row's
# value goes under the column matching dataRow.name; other tag columns are
# left blank. Always returns a CR-terminated row.
createCSVrow = (header, dataRow) ->
  i = header.indexOf(dataRow.name)
  csvRow = "#{dataRow.id},#{dataRow.dtime},"
  if i >= 0
    for j in [2...header.length]
      csvRow += if j == i then "#{dataRow.val}," else ','
  # BUG FIX: always strip the trailing comma and terminate with CR.
  # Previously a row whose tag name was missing from the header was
  # returned without a terminator, corrupting the assembled CSV.
  csvRow.slice(0, -1) + '\r'
# GET handler: download EVERY stored value as one CSV attachment.
# Columns are id, dtime, then one column per tag name (via getAllTags);
# each tag_vals row becomes one line with its value under its tag's column
# (see createCSVrow). Responds JSON only on a query error.
exports.allDataCSV = (req, res) ->
  sqlite3 = require('sqlite3').verbose()
  db = new (sqlite3.Database)(dbFile)
  db.serialize ->
    query = 'SELECT v.id, t.name, v.val, v.dtime FROM tags t JOIN tag_vals v ON t.id = v.tagID'
    prepQuery = db.prepare(query)
    prepQuery.all (err, rows) ->
      prepQuery.finalize()
      db.close()
      if err
        console.log err
        res.json
          status: 'error'
          message: err
          query: query
      else
        # Second query (inside getAllTags) fetches the tag list so the
        # header can name one column per non-deleted tag.
        getAllTags (err, tags) ->
          if err
            # NOTE(review): on tag-list failure no response is sent and
            # the request hangs — confirm whether that is acceptable.
            console.log err
          else
            csvString = ''
            da = [
              'id'
              'dtime'
            ]
            tagnames = tags.map((t) ->
              t.name
            )
            # Header: fixed columns + every tag name, joined with commas,
            # terminated by CR (\u000d).
            h = da.concat(tagnames)
            console.log h
            csvString = csvString + h.join(',') + '\u000d'
            i = 0
            while i < rows.length
              csvString = csvString + createCSVrow(h, rows[i])
              i++
            res.set 'Content-Type', 'text/csv'
            res.set 'Content-Disposition', 'attachment;filename=tagdata.csv'
            res.send csvString
            return
          return
      return
  return
# GET handler: download the last hour of one tag's values as CSV.
# Columns: id, dtime, <tag name>.
exports.seriesCSV = (req, res) ->
  sqlite3 = require('sqlite3').verbose()
  db = new (sqlite3.Database)(dbFile)
  db.serialize ->
    query = "SELECT v.id, t.name, v.val, v.dtime FROM tags t JOIN tag_vals v ON t.id = v.tagID WHERE tagID = ? AND v.dtime > DATETIME('now', '-1 HOUR')"
    prepQuery = db.prepare(query)
    prepQuery.all parseInt(req.params.tag), (err, rows) ->
      prepQuery.finalize()
      db.close()
      if err
        console.log err
        res.json
          status: 'error'
          message: err
          query: query
      else if rows.length == 0
        # BUG FIX: rows[0].name threw a TypeError when no values fell in
        # the window; serve a CSV with just the fixed columns instead.
        res.set 'Content-Type', 'text/csv'
        res.set 'Content-Disposition', 'attachment;filename=tagdata.csv'
        res.send 'id,dtime\r'
      else
        # Header names the tag column after the first row's tag name.
        csvString = ['id', 'dtime', rows[0].name].join(',') + '\r'
        for row in rows
          csvString += [row.id, row.dtime, row.val].join(',') + '\r'
        res.set 'Content-Type', 'text/csv'
        res.set 'Content-Disposition', 'attachment;filename=tagdata.csv'
        res.send csvString
      return
    return
  return
# GET handler: download one tag's values between :startDatetime and
# :endDatetime (compact YYYYMMDD_HHmmSS URL form) as CSV.
exports.seriesCSVBetween = (req, res) ->
  sqlite3 = require('sqlite3').verbose()
  db = new (sqlite3.Database)(dbFile)
  db.serialize ->
    query = 'SELECT v.id, t.name, v.val, v.dtime FROM tags t JOIN tag_vals v ON t.id = v.tagID WHERE tagID = ? AND dtime >= DATETIME(?) AND dtime <= DATETIME(?)'
    prepQuery = db.prepare(query)
    prepQuery.all parseInt(req.params.tag), dString_to_sqlite(req.params.startDatetime), dString_to_sqlite(req.params.endDatetime), (err, rows) ->
      prepQuery.finalize()
      db.close()
      if err
        console.log err
        res.json
          status: 'error'
          message: err
          query: query
      else if rows.length == 0
        # BUG FIX: rows[0].name threw a TypeError when the range was
        # empty; serve a CSV with just the fixed columns instead.
        res.set 'Content-Type', 'text/csv'
        res.set 'Content-Disposition', 'attachment;filename=tagdata.csv'
        res.send 'id,dtime\r'
      else
        csvString = ['id', 'dtime', rows[0].name].join(',') + '\r'
        for row in rows
          csvString += [row.id, row.dtime, row.val].join(',') + '\r'
        res.set 'Content-Type', 'text/csv'
        res.set 'Content-Disposition', 'attachment;filename=tagdata.csv'
        res.send csvString
      return
    return
  return
# GET handler: the single most recent value row for the tag :tag.
exports.latestValueSingleTag = (req, res) ->
  sqlite3 = require('sqlite3').verbose()
  db = new (sqlite3.Database)(dbFile)
  db.serialize ->
    stmt = db.prepare 'SELECT * FROM tag_vals WHERE id = (SELECT MAX(id) FROM tag_vals WHERE tagID = ?)'
    stmt.all req.params.tag, (queryErr, rows) ->
      console.log rows # debug trace kept from the original implementation
      stmt.finalize()
      db.close()
      if queryErr
        res.json {status: 'error', message: queryErr}
        console.log queryErr
      else
        res.json {status: 'OK', tag_val: rows[0]}
      return
    return
  return
# GET handler: the latest value of every non-deleted tag — one row per
# tag, selected with MAX(v.id) per tagID group, plus the tag's metadata.
exports.latestValueAllTags = (req, res) ->
  sqlite3 = require('sqlite3').verbose()
  db = new (sqlite3.Database)(dbFile)
  db.serialize ->
    query = 'SELECT t.tag as tag, t.name as name, t.description as description, t.units as units, t.id as t_id, t.minExpected as min, t.maxExpected as max, MAX(v.id) as v_id, v.val as val, v.dtime as dtime FROM tag_vals v JOIN tags t ON v.tagID = t.id WHERE t.deleted = 0 GROUP BY v.tagID'
    prepQuery = db.prepare(query)
    # BUG FIX: the statement has NO placeholders, but req.params.id was
    # being passed as a bind parameter, which node-sqlite3 rejects with a
    # range error. Call all() with the callback only.
    prepQuery.all (err, rows) ->
      prepQuery.finalize()
      db.close()
      if err
        res.json
          status: 'error'
          message: err
        console.log err
      else
        res.json
          status: 'OK'
          vals: rows
      return
    return
  return
# GET handler: report whether the logger process whose PID is stored in
# /root/tagserver.pid is currently running (via the is-running package).
exports.checkLoggerStatus = (req, res) ->
  fs = require('fs')
  # var ps = require("ps-node");
  running = require('is-running')
  fs.readFile '/root/tagserver.pid', (pidErr, pidData) ->
    if pidErr
      console.log 'Problem getting PID of tagserver'
      res.json {status: 'error', message: 'Problem getting PID of tagserver'}
    else
      res.json {status: 'OK', running: running(pidData)}
    return
  return
# GET handler: kick the logger init script and report success as JSON.
exports.restartLogger = (req, res) ->
  exec = require('child_process').exec
  exec '/etc/init.d/loggers start', (execErr, stdout, stderr) ->
    unless execErr
      res.json {status: 'OK'}
    else
      res.json {status: 'error', message: execErr}
    return
  return
# GET handler: return one row per config parameter (GROUP BY collapses
# duplicates; the row carries parameter, val, dateAdded).
exports.getSetup = (req, res) ->
  sqlite3 = require('sqlite3').verbose()
  db = new (sqlite3.Database)(dbFile)
  query = 'SELECT parameter, val, dateAdded FROM config GROUP BY parameter;'
  prepQuery = db.prepare(query)
  # BUG FIX: the statement has NO placeholders, but req.params.id was
  # being passed as a bind parameter, which node-sqlite3 rejects with a
  # range error. Call all() with the callback only.
  prepQuery.all (err, rows) ->
    prepQuery.finalize()
    db.close()
    if err
      res.json
        status: 'error'
        message: err
      console.log err
    else
      res.json
        status: 'OK'
        config: rows
    return
  return
# POST handler: append a new {parameter, val} row to config (latest row
# wins — see getSetup's GROUP BY), redirect the browser back to the setup
# page, then restart the logger: stop immediately, start again 5 s later
# so the stop has time to complete. Exec failures are only logged; the
# HTTP response (redirect) has already been sent by then.
exports.updateSetup = (req, res) ->
  exec = require('child_process').exec
  sqlite3 = require('sqlite3').verbose()
  db = new (sqlite3.Database)(dbFile)
  console.log req.body.parameter, req.body.val
  db.serialize ->
    query = db.prepare('INSERT INTO config (parameter, val) VALUES (?, ?)')
    query.run req.body.parameter, req.body.val, (err) ->
      query.finalize()
      db.close()
      if err
        console.log runErr: err
        # NOTE(review): `query` here is the Statement object, not SQL
        # text — confirm that is the intended payload.
        res.json
          status: 'error'
          message: err
          query: query
      else
        res.redirect '/#/setup'
        # Stop the loggers so they re-read the new config on start.
        exec '/etc/init.d/loggers stop', (error, stdout, stderr) ->
          if error
            console.log
              status: 'error'
              message: error
              query: query
          # Restart after a fixed 5-second grace period.
          setTimeout (->
            exec '/etc/init.d/loggers start', (error, stdout, stderr) ->
              if error
                console.log
                  status: 'error'
                  message: error
                  query: query
              return
            return
          ), 5000
          return
      return
  return
# GET handler: wipe stored tag values. /json/clearDatabase/:id clears one
# tag's values; /json/clearDatabase/all (or a missing id) clears them all.
exports.clearValues = (req, res) ->
  sqlite3 = require('sqlite3').verbose()
  db = new (sqlite3.Database)(dbFile)
  # Shared response path for both branches below.
  respond = (err) ->
    if err
      res.json
        status: 'error'
        message: err
      console.log err
    else
      res.json
        status: 'OK'
  # BUG FIX: the Angular client requests '/json/clearDatabase/all' to
  # clear everything (clearAllTagData). 'all' is truthy, so the original
  # fell into the single-tag branch with tagID = 'all' and deleted nothing.
  if req.params.id and req.params.id isnt 'all'
    db.serialize ->
      prepQuery = db.prepare 'DELETE FROM tag_vals WHERE tagID = ?;'
      prepQuery.run req.params.id, (err) ->
        prepQuery.finalize()
        db.close()
        respond err
  else
    db.serialize ->
      prepQuery = db.prepare 'DELETE FROM tag_vals WHERE id >= 0;'
      # BUG FIX: this statement has no placeholder; the original passed
      # req.params.id as a bind parameter, which node-sqlite3 rejects.
      prepQuery.run (err) ->
        prepQuery.finalize()
        db.close()
        respond err
  return

View File

@@ -1,423 +0,0 @@
// var dbFile = "/usr/db/data.db";
var dbFile = "/mnt/usb/data.db";
// var dbFile = '/Users/patrickjmcd/data.db';
var dString_to_sqlite = function(dString){
/**
* Takes a date string in the form YYYYMMDD_HHmmSS and returns it in SQLite format (YYYY-MM-DD HH:mm:SS)
* @param {String} dString
* @return {String} sqliteString
*/
var re = /(\d{4})(\d{2})(\d{2})_(\d{2})(\d{2})(\d{2})/;
var fd = re.exec(dString);
if (fd){
var sqliteString = "";
return sqliteString.concat(fd[1], "-", fd[2], "-", fd[3], " ", fd[4], ":", fd[5], ":", fd[6]);
} else {
return null;
}
};
var sqlite_to_dString = function(sqliteDate){
/**
* Takes a sqlite date string in the form YYYY-MM-DD HH:mm:SS and returns it in format YYYYMMDD_HHmmSS
* @param {String} sqliteDate
* @return {String} dString
*/
var re = /(\d{4})-(\d{2})-(\d{2}) (\d{2}):(\d{2}):(\d{2})/;
var fd = re.exec(sqliteDate);
if (fd){
var dString = "";
return dString.concat(fd[1], fd[2], fd[3], "_", fd[4], fd[5], fd[6]);
} else {
return null;
}
};
var getAllTags = function(callback){
var sqlite3 = require('sqlite3').verbose();
var db = new sqlite3.Database(dbFile);
db.serialize(function(){
var query = "SELECT * FROM tags WHERE deleted = 0";
var prepQuery = db.prepare(query);
prepQuery.all(function(err, rows) {
prepQuery.finalize();
db.close();
if (err) {
return callback(err, null);
console.log(err);
} else {
return callback(null, rows);
}
});
});
};
exports.getAllTags = function(req, res){
var sqlite3 = require('sqlite3').verbose();
var db = new sqlite3.Database(dbFile);
db.serialize(function(){
var query = "SELECT * FROM tags WHERE deleted = 0";
var prepQuery = db.prepare(query);
prepQuery.all(function(err, rows) {
prepQuery.finalize();
db.close();
if (err) {
res.json({status:"error", message:err});
console.log(err);
} else {
res.json({status:"OK", tags:rows});
}
});
});
};
exports.createTag = function(req, res){
var sqlite3 = require('sqlite3').verbose();
var db = new sqlite3.Database(dbFile);
db.serialize(function(){
var query = "INSERT INTO tags (tagName, units, minExpected, maxExpected, vanityName, description) VALUES (?, ?, ?, ?, ?, ?)";
var prepQuery = db.prepare(query);
prepQuery.run(req.body.tagName, req.body.units, req.body.minExpected, req.body.maxExpected, req.body.vanityName, req.body.description, function(err){
prepQuery.finalize();
db.close();
if (err) {
res.json({status:"error", message:err});
console.log(err);
} else {
res.json({status:"OK"});
}
});
});
};
exports.getTag = function(req, res){
var sqlite3 = require('sqlite3').verbose();
var db = new sqlite3.Database(dbFile);
db.serialize(function(){
var query = "SELECT * FROM tags WHERE id = ?";
var prepQuery = db.prepare(query);
prepQuery.all(req.params.id, function(err, rows) {
prepQuery.finalize();
db.close();
if (err) {
res.json({status:"error", message:err});
console.log(err);
} else {
res.json({status:"OK", tags:rows});
}
});
});
};
exports.updateTag = function(req, res){
var sqlite3 = require('sqlite3').verbose();
var db = new sqlite3.Database(dbFile);
db.serialize(function(){
var query = "UPDATE tags set tagName = ?, units = ?, minExpected = ?, maxExpected = ?, vanityName = ?, description = ? WHERE id = ?";
var prepQuery = db.prepare(query);
prepQuery.run(req.body.tagName, req.body.units, req.body.minExpected, req.body.maxExpected, req.body.vanityName, req.body.description, req.body.id, function(err) {
prepQuery.finalize();
db.close();
if (err) {
res.json({status:"error", message:err});
console.log(err);
} else {
res.json({status:"OK"});
}
});
});
};
exports.deleteTag = function(req, res){
var sqlite3 = require('sqlite3').verbose();
var db = new sqlite3.Database(dbFile);
db.serialize(function(){
var query = "UPDATE tags SET deleted = 1 WHERE id = ?";
var prepQuery = db.prepare(query);
prepQuery.run(req.params.tag, function(err) {
prepQuery.finalize();
db.close();
if (err) {
res.json({status:"error", message:err});
console.log(err);
} else {
res.json({status:"OK"});
}
});
});
};
exports.seriesTagValues = function(req, res){
var sqlite3 = require('sqlite3').verbose();
var db = new sqlite3.Database(dbFile);
db.serialize(function(){
var query = "SELECT * FROM vals WHERE tagID = ? AND dateAdded > DATETIME('now', '-1 HOUR')";
var prepQuery = db.prepare(query);
prepQuery.all(parseInt(req.params.tag), function(err, rows){
prepQuery.finalize();
db.close();
if (err){
console.log(err);
res.json({status:"error", message:err, query:query});
} else {
res.json({status:"OK", tag: req.params.tag, vals:rows});
}
});
});
};
exports.seriesTagValuesBetween = function(req, res){
var sqlite3 = require('sqlite3').verbose();
var db = new sqlite3.Database(dbFile);
db.serialize(function(){
var query = "SELECT * FROM vals WHERE tagID = ? AND dateAdded >= DATETIME(?) AND dateAdded <= DATETIME(?)";
var prepQuery = db.prepare(query);
prepQuery.all(parseInt(req.params.tag), dString_to_sqlite(req.params.startDatetime), dString_to_sqlite(req.params.endDatetime), function(err, rows){
prepQuery.finalize();
db.close();
if (err){
console.log(err);
res.json({status:"error", message:err, query:query});
} else {
res.json({status:"OK", tag: req.params.tag, startDatetime: dString_to_sqlite(req.params.startDatetime), endDatetime: dString_to_sqlite(req.params.endDatetime), vals:rows});
}
});
});
};
var createCSVrow = function(header, dataRow){
var i = header.indexOf(dataRow.vanityName);
var csvRow = dataRow.id.toString() + "," + dataRow.dateAdded + ",";
if (i >= 0){
for (var j = 2; j < header.length; j++){
if (j == i){
csvRow = csvRow + dataRow.val.toString()+ ",";
} else {
csvRow = csvRow + ",";
}
}
csvRow = csvRow.slice(0, -1) + "\r";
return csvRow;
}
};
exports.allDataCSV = function(req, res){
var sqlite3 = require('sqlite3').verbose();
var db = new sqlite3.Database(dbFile);
db.serialize(function(){
var query = "SELECT v.id, t.vanityName, v.val, v.dateAdded FROM tags t JOIN vals v ON t.id = v.tagID";
var prepQuery = db.prepare(query);
prepQuery.all( function(err, rows){
prepQuery.finalize();
db.close();
if (err){
console.log(err);
res.json({status:"error", message:err, query:query});
} else {
getAllTags(function(err,tags){
if (err){
console.log(err);
} else {
var csvString = "";
var da = ["id", "DateAdded"];
var tagVanityNames = tags.map(function(t){return t.vanityName;});
var h = da.concat(tagVanityNames);
console.log(h);
csvString = csvString + h.join(",") + "\r";
for (var i= 0; i < rows.length; i++){
csvString = csvString + createCSVrow(h, rows[i]);
}
res.set('Content-Type', 'text/csv');
res.set('Content-Disposition', "attachment;filename=tagdata.csv");
res.send(csvString);
}
});
}
});
});
};
exports.seriesCSV = function(req, res){
var sqlite3 = require('sqlite3').verbose();
var db = new sqlite3.Database(dbFile);
db.serialize(function(){
var query = "SELECT v.id, t.vanityName, v.val, v.dateAdded FROM tags t JOIN vals v ON t.id = v.tagID WHERE tagID = ? AND v.dateAdded > DATETIME('now', '-1 HOUR')";
var prepQuery = db.prepare(query);
prepQuery.all(parseInt(req.params.tag), function(err, rows){
prepQuery.finalize();
db.close();
if (err){
console.log(err);
res.json({status:"error", message:err, query:query});
} else {
var csvString = "";
var h = ["id", "DateAdded", rows[0].vanityName];
csvString = csvString + h.join(",") + "\r";
for (var i= 0; i < rows.length; i++){
csvString = csvString + [rows[i].id, rows[i].dateAdded, rows[i].val].join(",") + "\r";
}
res.set('Content-Type', 'text/csv');
res.set('Content-Disposition', "attachment;filename=tagdata.csv");
res.send(csvString);
}
});
});
};
exports.seriesCSVBetween = function(req, res){
var sqlite3 = require('sqlite3').verbose();
var db = new sqlite3.Database(dbFile);
db.serialize(function(){
var query = "SELECT v.id, t.vanityName, v.val, v.dateAdded FROM tags t JOIN vals v ON t.id = v.tagID WHERE tagID = ? AND dateAdded >= DATETIME(?) AND dateAdded <= DATETIME(?)";
var prepQuery = db.prepare(query);
prepQuery.all(parseInt(req.params.tag), dString_to_sqlite(req.params.startDatetime), dString_to_sqlite(req.params.endDatetime), function(err, rows){
prepQuery.finalize();
db.close();
if (err){
console.log(err);
res.json({status:"error", message:err, query:query});
} else {
var csvString = "";
var h = ["id", "DateAdded", rows[0].vanityName];
csvString = csvString + h.join(",") + "\r";
for (var i= 0; i < rows.length; i++){
csvString = csvString + [rows[i].id, rows[i].dateAdded, rows[i].val].join(",") + "\r";
}
res.set('Content-Type', 'text/csv');
res.set('Content-Disposition', "attachment;filename=tagdata.csv");
res.send(csvString);
}
});
});
};
exports.latestValueSingleTag = function(req, res){
var sqlite3 = require('sqlite3').verbose();
var db = new sqlite3.Database(dbFile);
db.serialize(function(){
var query = "SELECT * FROM vals WHERE id = (SELECT MAX(id) FROM vals WHERE tagID = ?)";
var prepQuery = db.prepare(query);
prepQuery.all(req.params.tag, function(err, rows) {
console.log(rows);
prepQuery.finalize();
db.close();
if (err) {
res.json({status:"error", message:err});
console.log(err);
} else {
res.json({status:"OK", tag_val:rows[0]});
}
});
});
};
exports.latestValueAllTags = function(req, res){
var sqlite3 = require('sqlite3').verbose();
var db = new sqlite3.Database(dbFile);
db.serialize(function(){
var query = "SELECT t.tagName as tagName, t.vanityName as vanityName, t.description as description, t.units as units, t.id as t_id, t.minExpected as min, t.maxExpected as max, MAX(v.id) as v_id, v.val as val, v.dateAdded as dtime FROM vals v JOIN tags t ON v.tagID = t.id WHERE t.deleted = 0 GROUP BY v.tagID";
var prepQuery = db.prepare(query);
prepQuery.all(req.params.id, function(err, rows) {
prepQuery.finalize();
db.close();
if (err) {
res.json({status:"error", message:err});
console.log(err);
} else {
res.json({status:"OK", vals:rows});
}
});
});
};
exports.checkLoggerStatus = function(req, res){
var fs = require('fs');
// var ps = require("ps-node");
var running = require("is-running");
fs.readFile('/root/tagserver.pid', function (derr,ddata) {
if (derr) {
console.log("Problem getting PID of tagserver");
res.json({status:"error", message: "Problem getting PID of tagserver"});
} else {
res.json({status: "OK", running: running(ddata)});
}
});
};
exports.restartLogger = function(req, res){
var exec = require('child_process').exec;
exec('/etc/init.d/loggers start', function(error, stdout, stderr){
if (error){
res.json({status:"error", message:error});
} else {
res.json({status:"OK"});
}
});
};
exports.getSetup = function(req, res){
var sqlite3 = require('sqlite3').verbose();
var db = new sqlite3.Database(dbFile);
var query = "SELECT parameter, val, dateAdded FROM config GROUP BY parameter;";
var prepQuery = db.prepare(query);
prepQuery.all(req.params.id, function(err, rows) {
prepQuery.finalize();
db.close();
if (err) {
res.json({status:"error", message:err});
console.log(err);
} else {
res.json({status:"OK", config:rows});
}
});
};
exports.updateSetup = function(req, res){
var exec = require('child_process').exec;
var sqlite3 = require('sqlite3').verbose();
var db = new sqlite3.Database(dbFile);
console.log(req.body.parameter, req.body.val);
db.serialize(function(){
var query = db.prepare('INSERT INTO config (parameter, val) VALUES (?, ?)');
query.run(req.body.parameter, req.body.val, function(err) {
query.finalize();
db.close();
if (err) {
console.log({runErr:err});
res.json({status:"error", message:err, query:query});
} else {
res.redirect("/#/setup");
exec('/etc/init.d/loggers stop', function(error, stdout, stderr){
if (error){
console.log({status:"error", message:error, query:query});
}
setTimeout(function(){
exec('/etc/init.d/loggers start', function(error, stdout, stderr){
if (error){
console.log({status:"error", message:error, query:query});
}
});
},5000);
});
}
});
});
};

View File

@@ -14,7 +14,8 @@
"serve-favicon": "*",
"sqlite3": "*",
"n3-charts": "*",
"is-running": "*"
"is-running": "*",
"coffee-script": "*"
},
"devDependencies": {},
"scripts": {

15
www/public/css/app.css Normal file
View File

@@ -0,0 +1,15 @@
.topMargin40 {
margin-top: 40px;
}
.row-flex {
display: -webkit-box;
display: -webkit-flex;
display: -ms-flexbox;
display: flex;
flex-wrap: wrap;
}
.row-flex > [class*='col-'] {
display: flex;
flex-direction: column;
}

View File

@@ -54,12 +54,12 @@ var sqlite_to_dString = function(sqliteDate){
};
var date_to_dString = function(inpDate){
var year = inpDate.getUTCFullYear().pad(4);
var month = (inpDate.getUTCMonth() + 1).pad(2);
var day = inpDate.getUTCDate().pad(2);
var hour = inpDate.getUTCHours().pad(2);
var min = inpDate.getUTCMinutes().pad(2);
var sec = inpDate.getUTCSeconds().pad(2);
var year = inpDate.getFullYear().pad(4);
var month = (inpDate.getMonth() + 1).pad(2);
var day = inpDate.getDate().pad(2);
var hour = inpDate.getHours().pad(2);
var min = inpDate.getMinutes().pad(2);
var sec = inpDate.getSeconds().pad(2);
return "".concat(year, month, day, "_", hour, min, sec);
};
@@ -195,12 +195,15 @@ tsCtrlrs.factory('tags',function($q, $http, $log){
var createTag = function(tag){
$http.post('/json/tag/add', {
tagName: tag.tagName,
vanityName: tag.vanityName,
tag: tag.tag,
name: tag.name,
units: tag.units,
minExpected: tag.minExpected,
maxExpected: tag.maxExpected,
description: tag.description
guarantee_sec: tag.guarantee_sec,
change_threshold: tag.change_threshold,
description: tag.description,
data_type: tag.data_type
}).success(function(data){
return data;
});
@@ -210,12 +213,15 @@ tsCtrlrs.factory('tags',function($q, $http, $log){
$log.info("updateTag called with "+ JSON.stringify(tag));
$http.post('/json/tag/update', {
id: tag.id,
tagName: tag.tagName,
vanityName: tag.vanityName,
tag: tag.tag,
name: tag.name,
units: tag.units,
minExpected: tag.minExpected,
maxExpected: tag.maxExpected,
description: tag.description
guarantee_sec: tag.guarantee_sec,
change_threshold: tag.change_threshold,
description: tag.description,
data_type: tag.data_type
}).success(function(data){
return data;
}).error(function(err){
@@ -234,6 +240,28 @@ tsCtrlrs.factory('tags',function($q, $http, $log){
return deferred.promise;
};
var clearSingleTagData = function(id){
var deferred = $q.defer();
var url = '/json/clearDatabase/' + id;
$http.get(url).success(function(data) {
deferred.resolve({
status: data.status
});
});
return deferred.promise;
};
var clearAllTagData = function(){
var deferred = $q.defer();
var url = '/json/clearDatabase/all';
$http.get(url).success(function(data) {
deferred.resolve({
status: data.status
});
});
return deferred.promise;
};
return {
getTag: getTag,
getTagList: getTagList,
@@ -243,6 +271,8 @@ tsCtrlrs.factory('tags',function($q, $http, $log){
createTag: createTag,
updateTag: updateTag,
deleteTag: deleteTag,
clearSingleTagData: clearSingleTagData,
clearAllTagData: clearAllTagData
};
});
@@ -396,6 +426,32 @@ tsCtrlrs.controller('tagsCtrl', function($scope, $route, $http, $routeParams, Pa
});
};
$scope.openClearTagData = function(id){
var getTag = tags.getTag(id);
getTag.then(function(data){
if (data.status == "OK"){
$scope.error = false;
$scope.dTagValues = data.tag;
$log.info("Thinking about deleting tag data with parameters: "+ JSON.stringify($scope.dTagValues));
} else {
$scope.error = data.message;
}
});
};
$scope.deleteTagValues = function(id){
var clearSingleTagData = tags.clearSingleTagData(id);
clearSingleTagData.then(function(data){
$log.info("deleting tag "+ id + " status: " + data.status);
if (data.status == "OK"){
$scope.error = false;
$scope.loadTagList();
} else {
$scope.error = data.message;
}
});
};
$scope.openEditTag = function(id){
var getTag = tags.getTag(id);
getTag.then(function(data){
@@ -448,7 +504,7 @@ tsCtrlrs.controller('tagValsCtrl', function($scope, $route, $http, $routeParams,
if (data.status == "OK"){
$scope.data = data;
$scope.data.vals = $scope.data.vals.map(function(x){
return {id: x.id, tagID: x.tagID, val: x.val, dateAdded: new Date(x.dateAdded)};
return {id: x.id, tagID: x.tagID, val: x.val, dtime: new Date(x.dtime)};
});
$scope.error = false;
@@ -466,7 +522,7 @@ tsCtrlrs.controller('tagValsCtrl', function($scope, $route, $http, $routeParams,
],
axes: {
x: {
key: "dateAdded",
key: "dtime",
type: "date"
}
}

View File

@@ -21,14 +21,16 @@
<div ng-if="!error" class="container">
<div class="row">
<button ng-click="loadDashboard()" class="btn btn-large btn-success"><i class="fa fa-refresh"></i> Reload Dashboard</button>
<a href="/json/csv/all" class="btn btn-large btn-primary"><i class="fa fa-download"></i> Download All Data</a>
<div ng-repeat="val in vals">
<div class="col-md-4" style="height:200px; margin-bottom:40px;">
<just-gage id="{{val.vanityName}}" min='val.min' max='val.max' value='val.val' options="{label:val.units,title:val.vanityName, decimals:2, refreshAnimationType:'bounce', startAnimationType:'bounce'}"></just-gage>
<just-gage id="{{val.name}}" min='val.minExpected' max='val.maxExpected' value='val.val' options="{label:val.units,title:val.name, decimals:2, refreshAnimationType:'bounce', startAnimationType:'bounce'}"></just-gage>
<div style="text-align:center">
<h5>{{ val.dtime | date: 'medium'}}</h5>
<a href="/#/tag/{{val.t_id}}" class="btn btn-large btn-primary"><i class="fa fa-line-chart"></i> View Data</a>
</div>
</div>
</div>
</div>
</div>
</div>
</div>

View File

@@ -32,19 +32,31 @@
<div ng-if="!error" class="container">
<div class="row" style="margin-bottom:20px;">
<div class="col-md-12">
<h1>{{tag.vanityName}}</h1>
<span class="timeLabel">From: </span><quick-datepicker ng-model='startDatetime'></quick-datepicker> <span class="timeLabel">To: </span><quick-datepicker ng-model='endDatetime'></quick-datepicker> <button ng-click="loadTagVals(startDatetime, endDatetime)" class="btn btn-large btn-success padMe"><i class="fa fa-refresh"></i> Reload Values</button>
<h1>{{tag.name}}</h1>
<div class="row row-flex">
<div class="col-md-4">
<span class="timeLabel">From: </span><quick-datepicker ng-model='startDatetime'></quick-datepicker>
</div>
<div class="col-md-4">
<span class="timeLabel">To: </span><quick-datepicker ng-model='endDatetime'></quick-datepicker>
</div>
<div class="col-md-3">
<button ng-click="loadTagVals(startDatetime, endDatetime)" class="btn btn-large btn-success padMe"><i class="fa fa-refresh"></i> Reload Values</button>
</div>
</div>
</div>
</div>
<div class="row">
<a href="/json/CSV/{{tag.id}}/{{startDatetime | dString}}/{{endDatetime | dString }}" class="btn btn-large btn-primary padMe"><i class="fa fa-download"></i> Download Data</a>
<div class="col-md-8">
<div class="col-md-12">
<div class="tagChart" style="height:400px;">
<linechart data="data" options="options"></linechart>
</div>
</div>
<div class="col-md-4">
</div>
<div class="row topMargin40">
<div class="col-md-6">
<a href="/json/CSV/{{tag.id}}/{{startDatetime | dString}}/{{endDatetime | dString }}" class="btn btn-large btn-primary padMe"><i class="fa fa-download"></i> Download Data</a>
<table class="table">
<thead>
<tr>
@@ -57,7 +69,7 @@
<tr ng-repeat="val in data.vals">
<td>{{val.id}}</td>
<td>{{val.val}} {{tag.units}}</td>
<td>{{val.dateAdded | sqlite_to_local}}</td>
<td>{{val.dtime | date:'short'}}</td>
</tr>
</tbody>
</table>

View File

@@ -1,109 +1,182 @@
<div class="modal fade" id="addModal">
<div class="modal-dialog">
<div class="modal-content">
<div class="modal-header">
<button type="button" class="close" data-dismiss="modal" aria-label="Close"><span aria-hidden="true">&times;</span></button>
<h4 class="modal-title">Add a New Tag...</h4>
</div>
<div class="modal-body">
<div class="well" ng-if="message"><h3 class="text-danger">{{message}}</h3></div>
<form>
<div class="form-group">
<label for="tagName">Tag Name</label>
<input type="text" ng-model="newTag.tagName" class="form-control" id="tagName" placeholder="Tag Name Here">
</div>
<div class="form-group">
<label for="vanityName">Vanity Name</label>
<input type="text" ng-model="newTag.vanityName" class="form-control" id="vanityName" placeholder="Vanity Name Here">
</div>
<div class="form-group">
<label for="description">Description</label>
<input type="textarea" ng-model="newTag.description" class="form-control" id="description" placeholder="Tag Description Here">
</div>
<div class="form-group">
<label for="minExpected">Minimum Value Expected</label>
<input type="number" ng-model="newTag.minExpected" class="form-control" id="minExpected" placeholder="0">
</div>
<div class="form-group">
<label for="maxExpected">Maximum Value Expected</label>
<input type="number" ng-model="newTag.maxExpected" class="form-control" id="maxExpected" placeholder="100">
</div>
<div class="form-group">
<label for="units">Units</label>
<input type="text" ng-model="newTag.units" class="form-control" id="units" placeholder="lbs, PSI, in, etc.">
</div>
</form>
</div>
<div class="modal-footer">
<button type="button" class="btn btn-default" data-dismiss="modal">Close</button>
<button type="button" class="btn btn-primary" ng-click="submitAddTag();" data-dismiss="modal">Add Tag</button>
</div>
</div><!-- /.modal-content -->
</div><!-- /.modal-dialog -->
<div class="modal-dialog">
<div class="modal-content">
<div class="modal-header">
<button type="button" class="close" data-dismiss="modal" aria-label="Close"><span aria-hidden="true">&times;</span></button>
<h4 class="modal-title">Add a New Tag...</h4>
</div>
<div class="modal-body">
<div class="well" ng-if="message"><h3 class="text-danger">{{message}}</h3></div>
<form>
<div class="form-group">
<label for="tag">Tag</label>
<input type="text" ng-model="newTag.tag" class="form-control" id="tag" placeholder="Tag Name Here">
</div>
<div class="form-group">
<label for="name">Name</label>
<input type="text" ng-model="newTag.name" class="form-control" id="name" placeholder="Vanity Name Here">
</div>
<div class="form-group">
<label for="description">Description</label>
<input type="textarea" ng-model="newTag.description" class="form-control" id="description" placeholder="Tag Description Here">
</div>
<div class="form-group">
<label for="data_type">Data Type</label>
<select ng-model="newTag.data_type" class="form-control" id="data_type">
<option value="REAL">Floating Point</option>
<option value="DINT">Integer</option>
<option value="BOOL">Boolean</option>
</select>
</div>
<div class="col-md-5">
<div class="form-group">
<label for="minExpected">Minimum Value Expected</label>
<input type="number" ng-model="newTag.minExpected" class="form-control" id="minExpected" placeholder="0">
</div>
</div>
<div class="col-md-5">
<div class="form-group">
<label for="maxExpected">Maximum Value Expected</label>
<input type="number" ng-model="newTag.maxExpected" class="form-control" id="maxExpected" placeholder="100">
</div>
</div>
<div class="col-md-2">
<div class="form-group">
<label for="units">Units</label>
<input type="text" ng-model="newTag.units" class="form-control" id="units" placeholder="lbs, PSI, in, etc.">
</div>
</div>
<div class="col-md-6">
<div class="form-group">
<label for="change_threshold">Change Threshold</label>
<input type="number" ng-model="newTag.change_threshold" class="form-control" id="change_threshold" placeholder="0">
</div>
</div>
<div class="col-md-6">
<div class="form-group">
<label for="guarantee_sec">Guarantee Sec.</label>
<input type="number" ng-model="newTag.guarantee_sec" class="form-control" id="guarantee_sec" placeholder="3600">
</div>
</div>
</form>
</div>
<div class="modal-footer">
<button type="button" class="btn btn-default" data-dismiss="modal">Close</button>
<button type="button" class="btn btn-primary" ng-click="submitAddTag();" data-dismiss="modal">Add Tag</button>
</div>
</div><!-- /.modal-content -->
</div><!-- /.modal-dialog -->
</div><!-- /.modal -->
<div class="modal fade" id="editModal">
<div class="modal-dialog">
<div class="modal-content">
<div class="modal-header">
<button type="button" class="close" data-dismiss="modal" aria-label="Close"><span aria-hidden="true">&times;</span></button>
<h4 class="modal-title">Edit a Tag...</h4>
</div>
<div class="modal-body">
<div class="well" ng-if="message"><h3 class="text-danger">{{message}}</h3></div>
<form>
<div class="form-group">
<label for="tagName">Tag Name</label>
<input type="text" ng-model="editTag.tagName" class="form-control" id="tagName" placeholder="Tag Name Here">
</div>
<div class="form-group">
<label for="vanityName">Vanity Name</label>
<input type="text" ng-model="editTag.vanityName" class="form-control" id="vanityName" placeholder="Vanity Name Here">
</div>
<div class="form-group">
<label for="description">Description</label>
<input type="textarea" ng-model="editTag.description" class="form-control" id="description" placeholder="Tag Description Here">
</div>
<div class="form-group">
<label for="minExpected">Minimum Value Expected</label>
<input type="number" ng-model="editTag.minExpected" class="form-control" id="minExpected" placeholder="0">
</div>
<div class="form-group">
<label for="maxExpected">Maximum Value Expected</label>
<input type="number" ng-model="editTag.maxExpected" class="form-control" id="maxExpected" placeholder="100">
</div>
<div class="form-group">
<label for="units">Units</label>
<input type="text" ng-model="editTag.units" class="form-control" id="units" placeholder="lbs, PSI, in, etc.">
</div>
</form>
<pre>{{editTag}}</pre>
</div>
<div class="modal-footer">
<button type="button" class="btn btn-default" data-dismiss="modal">Close</button>
<button type="button" class="btn btn-primary" ng-click="submitEditTag();" data-dismiss="modal">Submit Tag Edits</button>
</div>
</div><!-- /.modal-content -->
</div><!-- /.modal-dialog -->
<div class="modal-dialog">
<div class="modal-content">
<div class="modal-header">
<button type="button" class="close" data-dismiss="modal" aria-label="Close"><span aria-hidden="true">&times;</span></button>
<h4 class="modal-title">Edit a Tag...</h4>
</div>
<div class="modal-body">
<div class="well" ng-if="message"><h3 class="text-danger">{{message}}</h3></div>
<form>
<div class="form-group">
<label for="tag">Tag Name</label>
<input type="text" ng-model="editTag.tag" class="form-control" id="tag" placeholder="Tag Name Here">
</div>
<div class="form-group">
<label for="name">Vanity Name</label>
<input type="text" ng-model="editTag.name" class="form-control" id="name" placeholder="Vanity Name Here">
</div>
<div class="form-group">
<label for="description">Description</label>
<input type="textarea" ng-model="editTag.description" class="form-control" id="description" placeholder="Tag Description Here">
</div>
<div class="form-group">
<label for="data_type">Data Type</label>
<select ng-model="editTag.data_type" class="form-control" id="data_type">
<option value="REAL">Floating Point</option>
<option value="DINT">Integer</option>
<option value="BOOL">Boolean</option>
</select>
</div>
<div class="col-md-5">
<div class="form-group">
<label for="minExpected">Minimum Value Expected</label>
<input type="number" ng-model="editTag.minExpected" class="form-control" id="minExpected" placeholder="0">
</div>
</div>
<div class="col-md-5">
<div class="form-group">
<label for="maxExpected">Maximum Value Expected</label>
<input type="number" ng-model="editTag.maxExpected" class="form-control" id="maxExpected" placeholder="100">
</div>
</div>
<div class="col-md-2">
<div class="form-group">
<label for="units">Units</label>
<input type="text" ng-model="editTag.units" class="form-control" id="units" placeholder="lbs, PSI, in, etc.">
</div>
</div>
<div class="col-md-6">
<div class="form-group">
<label for="change_threshold">Change Threshold</label>
<input type="number" ng-model="editTag.change_threshold" class="form-control" id="change_threshold" placeholder="0">
</div>
</div>
<div class="col-md-6">
<div class="form-group">
<label for="guarantee_sec">Guarantee Sec.</label>
<input type="number" ng-model="editTag.guarantee_sec" class="form-control" id="guarantee_sec" placeholder="3600">
</div>
</div>
</form>
<pre>{{editTag}}</pre>
</div>
<div class="modal-footer">
<button type="button" class="btn btn-default" data-dismiss="modal">Close</button>
<button type="button" class="btn btn-primary" ng-click="submitEditTag();" data-dismiss="modal">Submit Tag Edits</button>
</div>
</div><!-- /.modal-content -->
</div><!-- /.modal-dialog -->
</div><!-- /.modal -->
<div class="modal fade" id="deleteModal">
<div class="modal-dialog">
<div class="modal-content">
<div class="modal-header">
<button type="button" class="close" data-dismiss="modal" aria-label="Close"><span aria-hidden="true">&times;</span></button>
<h4 class="modal-title">Are you sure?</h4>
</div>
<div class="modal-body">
<div class="well" ng-if="message"><h3 class="text-danger">{{message}}</h3></div>
<h3>Are you sure you want to delete the tag {{dTag.vanityName}} ({{dTag.tagName}})?</h3>
</div>
<div class="modal-footer">
<button type="button" class="btn btn-primary" data-dismiss="modal">NO!!!!!!</button>
<button type="button" class="btn btn-danger" ng-click="deleteTag(dTag.id);" data-dismiss="modal">Heck yes, delete it!</button>
</div>
</div><!-- /.modal-content -->
</div><!-- /.modal-dialog -->
<div class="modal-dialog">
<div class="modal-content">
<div class="modal-header">
<button type="button" class="close" data-dismiss="modal" aria-label="Close"><span aria-hidden="true">&times;</span></button>
<h4 class="modal-title">Are you sure?</h4>
</div>
<div class="modal-body">
<div class="well" ng-if="message"><h3 class="text-danger">{{message}}</h3></div>
<h3>Are you sure you want to delete the tag {{dTag.name}} ({{dTag.tag}})?</h3>
</div>
<div class="modal-footer">
<button type="button" class="btn btn-primary" data-dismiss="modal">NO!!!!!!</button>
<button type="button" class="btn btn-danger" ng-click="deleteTag(dTag.id);" data-dismiss="modal">Heck yes, delete it!</button>
</div>
</div><!-- /.modal-content -->
</div><!-- /.modal-dialog -->
</div><!-- /.modal -->
<div class="modal fade" id="clearTagDataModal">
<div class="modal-dialog">
<div class="modal-content">
<div class="modal-header">
<button type="button" class="close" data-dismiss="modal" aria-label="Close"><span aria-hidden="true">&times;</span></button>
<h4 class="modal-title">Are you sure?</h4>
</div>
<div class="modal-body">
<div class="well" ng-if="message"><h3 class="text-danger">{{message}}</h3></div>
<h3>Are you sure you want to delete the data for tag {{dTagValues.name}} ({{dTagValues.tag}})?</h3>
</div>
<div class="modal-footer">
<button type="button" class="btn btn-primary" data-dismiss="modal">NO!!!!!!</button>
<button type="button" class="btn btn-danger" ng-click="deleteTagValues(dTagValues.id);" data-dismiss="modal">Heck yes, delete it!</button>
</div>
</div><!-- /.modal-content -->
</div><!-- /.modal-dialog -->
</div><!-- /.modal -->
@@ -140,17 +213,19 @@
<td>Max Expected Value</td>
<td>Units</td>
<td></td>
<td></td>
<td></td>
</tr>
</thead>
<tbody>
<tr ng-repeat="tag in tags">
<td>{{tag.id}}</td>
<td><a href="/#/tag/{{tag.id}}">{{tag.vanityName}}</a> <i class="fa fa-info-circle" data-toggle="popover" title="{{tag.vanityName}}" data-content="Tag Name: {{tag.tagName}} </br>Details: {{tag.description}}"></i></td>
<td><a href="/#/tag/{{tag.id}}">{{tag.name}}</a> <i class="fa fa-info-circle" data-toggle="popover" title="{{tag.name}}" data-content="Tag Name: {{tag.tag}} </br>Details: {{tag.description}}<br/>Type: {{tag.data_type}}"></i></td>
<td>{{tag.minExpected}}</td>
<td>{{tag.maxExpected}}</td>
<td>{{tag.units}}</td>
<td><button data-toggle="modal" data-target="#editModal" ng-click="openEditTag(tag.id)" class="btn btn-primary">Edit</button></td>
<td><button data-toggle="modal" data-target="#clearTagDataModal" ng-click="openClearTagData(tag.id)" class="btn btn-primary">Clear Data</button></td>
<td><button data-toggle="modal" data-target="#deleteModal" ng-click="openDeleteTag(tag.id)" class="btn btn-danger">Delete</button></td>
<script>
$(document).ready(function(){
@@ -167,6 +242,3 @@
</div>
</div>
</div>

View File

@@ -18,13 +18,14 @@
<script src="/bower_components/justgage-toorshia/justgage.js"></script>
<script src="/bower_components/angular-justgage/ng-justgage.js"></script>
<script src="/node_modules/n3-charts/node_modules/d3/d3.min.js"></script>
<script src="/node_modules/d3/d3.min.js"></script>
<script src="/node_modules/n3-charts/build/LineChart.js"></script>
<link rel="stylesheet" href="/node_modules/n3-charts/build/LineChart.css">
<link rel="stylesheet" href="/bower_components/font-awesome/css/font-awesome.min.css">
<script src="/bower_components/ngQuickDate/dist/ng-quick-date.min.js"></script>
<link rel="stylesheet" href="/bower_components/ngQuickDate/dist/ng-quick-date.css">
<link rel="stylesheet" href="/bower_components/ngQuickDate/dist/ng-quick-date-default-theme.css">
<link rel="stylesheet" href="/css/app.css">
<script src="/js/router.js"></script>
<script src="/js/controller.js"></script>
@@ -43,7 +44,7 @@
<span class="icon-bar"></span>
<span class="icon-bar"></span>
</button>
<a class="navbar-brand" rel="home" href="/" title="Henry Pump">
<a class="navbar-brand" rel="home" href="/#/" title="Henry Pump">
<img style="max-width:100px; "src="/img/logo.png">
</a>
</div>