Reorganizes for 2 docker containers

This commit is contained in:
Patrick McDonagh
2016-10-13 17:18:52 -05:00
parent e374169c71
commit 6dcbcd2846
722 changed files with 173546 additions and 548 deletions

3
.gitmodules vendored

@@ -1,3 +0,0 @@
[submodule "tag"]
path = tag
url = ssh://git-codecommit.us-east-1.amazonaws.com/v1/repos/POCONSOLE-Tag


@@ -1,10 +0,0 @@
{
"transport": "ftp",
"uploadOnSave": false,
"useAtomicWrites": false,
"deleteLocal": false,
"ignore": [
".remote-sync.json",
".git/**"
]
}

Binary file not shown.

14
daq/Dockerfile Normal file

@@ -0,0 +1,14 @@
FROM python:latest
RUN mkdir /root/tag-logger
COPY taglogger.py /root/tag-logger/taglogger.py
COPY sampleData.py /root/tag-logger/sampleData.py
COPY tag /root/tag-logger/tag
# RUN wget https://bootstrap.pypa.io/get-pip.py
# RUN python get-pip.py
RUN pip install requests
RUN git clone https://github.com/ruscito/pycomm.git && cd pycomm && python setup.py install && cd ..
CMD ["python", "/root/tag-logger/sampleData.py"]


@@ -15,7 +15,7 @@ import requests
import json
# DEFAULTS
web_address = "https://localhost:3000"
web_address = "https://10.10.10.10:3000"
scan_rate = 30 # seconds
save_all = "test" # use True, False, or any string
@@ -63,7 +63,7 @@ def main():
get_tag_request_data = {'where': '{"tag_class": 5}'}
get_tag_request = requests.get('{}/tag'.format(web_address), params=get_tag_request_data, verify=False)
tags = json.loads(get_tag_request.text)
except Exception, e:
except Exception as e:
print("Error getting tags: {}".format(e))
time.sleep(10)
main()
@@ -74,7 +74,7 @@ def main():
sr_try = json.loads(sr_req.text)
if len(sr_try) > 0:
scan_rate = int(sr_try[0]['val'])
except Exception, e:
except Exception as e:
print("Error getting scan rage: {}".format(e))
print("I'll just use {} seconds as the scan rate...".format(scan_rate))
@@ -87,7 +87,7 @@ def main():
save_all = True
elif sa_try[0]['val'].lower() == "false":
save_all = False
except Exception, e:
except Exception as e:
print("Error getting save-all: {}".format(e))
print("I'll just use {} as the save-all parameter...".format(save_all))

3
daq/tag/.gitmodules vendored Normal file

@@ -0,0 +1,3 @@
[submodule "micro800"]
path = micro800
url = http://patrickjmcd@bitbucket.poconsole.net/scm/poconsole/micro800.git

126
daq/tag/alarm.py Normal file

@@ -0,0 +1,126 @@
#! /usr/bin/python
# from datetime import datetime
import time
from pycomm.ab_comm.clx import Driver as ClxDriver
import tag.micro800.micro800 as u800
import requests
import json
# import traceback
# import pickle
web_address = "http://localhost:3000"
def readTag(addr, tag):
time.sleep(0.01)
c = ClxDriver()
if c.open(addr):
try:
v = c.read_tag(tag)
return v
except Exception:
print("ERROR RETRIEVING TAG: {} at {}".format(tag, addr))
err = c.get_status()
c.close()
print(err)
pass
c.close()
class AnalogAlarm():
global readTag, con
def __init__(self, name, tag, db_id, device_type='CLX', ip_address='192.168.1.10'):
self.name = name
self.tag = tag
self.alarm = False
self.warning = False
self.lastAlarmCheckVal = False
self.lastWarningCheckVal = False
self.device_type = device_type
self.readFn = readTag
self.db_id = db_id
if self.device_type == "u800":
self.readFn = u800.readTag
self.ip_address = ip_address
self.condMapFn = {
20: "Low",
21: "High",
24: "LoLo",
25: "HiHi",
32: "Input Failure",
34: "Configuration Error",
16: "Failure to Stop",
17: "Failure to Start",
18: "Drive Fault"
}
def checkStatus(self, stroke_number):
condition = ''
self.alarm = self.readFn(self.ip_address, '{}.Alarm'.format(self.tag))[0] > 0
alarmChanged = not (self.alarm == self.lastAlarmCheckVal)
self.warning = self.readFn(self.ip_address, '{}.Warning'.format(self.tag))[0] > 0
warningChanged = not (self.warning == self.lastWarningCheckVal)
if (alarmChanged and self.alarm) or (warningChanged and self.warning):
condition = self.condMapFn[int(self.readFn(self.ip_address, '{}.Alarm_Code'.format(self.tag))[0])]
value = self.readFn(self.ip_address, '{}.Alarm_Value'.format(self.tag))[0]
triggerType = "Alarm"
if warningChanged:
triggerType = 'Warning'
data = {
'alarmID': self.db_id,
'type': triggerType,
'cond': condition,
'value': value,
'stroke_number': stroke_number
}
r = requests.post('{}/event'.format(web_address), data=data)
resp = json.loads(r.text)
print("Stored Event {} at {}".format(resp['id'], resp['createdAt']))
if warningChanged:
self.lastWarningCheckVal = self.warning
if alarmChanged:
self.lastAlarmCheckVal = self.alarm
class bitAlarm():
def __init__(self, name, tag, condition, db_id, device_type='CLX', ip_address='192.168.1.10'):
self.name = name
self.tag = tag
self.condition = condition
self.status = False
self.lastStatusCheckVal = False
self.device_type = device_type
self.readFn = readTag
self.db_id = db_id
if self.device_type == "u800":
self.readFn = u800.readTag
self.ip_address = ip_address
def checkStatus(self, stroke_number):
self.status = self.readFn(self.ip_address, self.tag)[0] > 0
statusChanged = not (self.status == self.lastStatusCheckVal)
if statusChanged and self.status:
data = {
'alarmID': self.db_id,
'type': "Info",
'cond': self.condition,
'value': 0.0,
'stroke_number': stroke_number
}
r = requests.post('{}/event'.format(web_address), data=data)
resp = json.loads(r.text)
print("Stored Event {} at {}".format(resp['id'], resp['createdAt']))
if statusChanged:
self.lastStatusCheckVal = self.status
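A rough sketch of how these two classes would be driven from a scan loop (the import path, tag names, database IDs, and condition string below are hypothetical; the PLC address mirrors the constructor default):

import time
from alarm import AnalogAlarm, bitAlarm  # hypothetical import path

# hypothetical alarm definitions; the db_id values would come from the web service
level = AnalogAlarm('Tank Level', 'Tank_Level', db_id=12, ip_address='192.168.1.10')
estop = bitAlarm('E-Stop', 'EStop_PB', 'Failure to Stop', db_id=13, ip_address='192.168.1.10')

stroke_number = 0
while True:
    # each call re-reads the PLC and posts an event only on a False -> True transition
    level.checkStatus(stroke_number)
    estop.checkStatus(stroke_number)
    stroke_number += 1
    time.sleep(1)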


@@ -0,0 +1,119 @@
from pycomm.ab_comm.clx import Driver as plcDriver
import sys
def readMicroTag(addr, tag):
addr = str(addr)
tag = str(tag)
c = plcDriver()
if c.open(addr, True):
try:
v = c.read_tag(tag)
# print(v)
return v
except Exception:
err = c.get_status()
c.close()
print("{} on reading {} from {}".format(err, tag, addr))
pass
c.close()
def getTagType(addr, tag):
addr = str(addr)
tag = str(tag)
c = plcDriver()
if c.open(addr, True):
try:
return c.read_tag(tag)[1]
except Exception:
err = c.get_status()
c.close()
print(err)
pass
c.close()
def write(addr, tag, val, t):
addr = str(addr)
tag = str(tag)
c = plcDriver()
if c.open(addr, True):
try:
wt = c.write_tag(tag, val, t)
return wt
except Exception:
err = c.get_status()
c.close()
print("Write Error: {} setting {} at {} to {} type {}".format(err, tag, addr, val, t))
return False
c.close()
def closeEnough(a, b):
return abs(a - b) <= 0.001
def writeMicroTag(addr, tag, val, handshake=None, handshake_val=None):
addr = str(addr)
tag = str(tag)
print("handshake: {}, handshake_val: {}".format(handshake, handshake_val))
chk_tag = tag
if not(handshake is None) and not(handshake == "None"):
chk_tag = str(handshake)
print("Handshake tag passed, using {}".format(chk_tag))
chk_val = val
if not (handshake_val is None) and not(handshake_val == "None"):
chk_val = handshake_val
print("Handshake value passed, using {}".format(chk_val))
attempts_allowed = 5
attempts = 1
while attempts <= attempts_allowed:
try:
attempts = attempts + 1
cv = readMicroTag(addr, tag)
print("Val Before Write: {}".format(cv))
if cv:
if cv[1] == "REAL":
val = float(val)
chk_val = float(chk_val)
else:
val = int(val)
chk_val = int(chk_val)
wt = write(addr, tag, val, cv[1])
if wt:
print("write: {}".format(wt))
chk = readMicroTag(addr, chk_tag)
if chk:
print("chk: {}, chk_val: {}".format(chk, chk_val))
if closeEnough(chk[0], chk_val):
return True
except Exception as e:
print(e)
return False
def readMicroTagList(addr, tList):
addr = str(addr)
c = plcDriver()
if c.open(addr, True):
vals = []
try:
for t in tList:
v = c.read_tag(t)
vals.append({"tag": t, "val": v[0], "type": v[1]})
# print(v)
# print("{0} - {1}".format(t, v))
except Exception:
err = c.get_status()
c.close()
print(err)
pass
c.close()
return vals
if __name__ == '__main__':
if len(sys.argv) > 2:
print(readMicroTag(sys.argv[1], sys.argv[2]))
else:
print ("Did not pass a target and tag name.")


@@ -0,0 +1 @@
__author__ = 'agostino'


@@ -0,0 +1,2 @@
__author__ = 'agostino'
import logging


@@ -0,0 +1,847 @@
# -*- coding: utf-8 -*-
#
# clx.py - Ethernet/IP Client for Rockwell PLCs
#
#
# Copyright (c) 2014 Agostino Ruscito <ruscito@gmail.com>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
from tag.micro800.pycomm_micro.cip.cip_base import *
from tag.micro800.pycomm_micro.common import setup_logger
import logging
class Driver(Base):
"""
This Ethernet/IP client is based on Rockwell specification. Please refer to the link below for details.
http://literature.rockwellautomation.com/idc/groups/literature/documents/pm/1756-pm020_-en-p.pdf
The following services have been implemented:
- Read Tag Service (0x4c)
- Read Tag Fragment Service (0x52)
- Write Tag Service (0x4d)
- Write Tag Fragment Service (0x53)
- Multiple Service Packet (0x0a)
The client has been successfully tested with the following PLCs:
- CompactLogix 5330ERM
- CompactLogix 5370
- ControlLogix 5572 and 1756-EN2T Module
"""
def __init__(self, debug=False, filename=None):
if debug:
super(Driver, self).__init__(setup_logger('ab_comm.clx', logging.DEBUG, filename))
else:
super(Driver, self).__init__(setup_logger('ab_comm.clx', logging.INFO, filename))
self._buffer = {}
self._get_template_in_progress = False
self.__version__ = '0.2'
def get_last_tag_read(self):
""" Return the last tag read by a multi request read
:return: A tuple (tag name, value, type)
"""
return self._last_tag_read
def get_last_tag_write(self):
""" Return the last tag write by a multi request write
:return: A tuple (tag name, 'GOOD') if the write was successful otherwise (tag name, 'BAD')
"""
return self._last_tag_write
def _parse_instance_attribute_list(self, start_tag_ptr, status):
""" extract the tags list from the message received
:param start_tag_ptr: The point in the message string where the tag list begin
:param status: The status of the message receives
"""
tags_returned = self._reply[start_tag_ptr:]
tags_returned_length = len(tags_returned)
idx = 0
instance = 0
count = 0
try:
while idx < tags_returned_length:
instance = unpack_dint(tags_returned[idx:idx+4])
idx += 4
tag_length = unpack_uint(tags_returned[idx:idx+2])
idx += 2
tag_name = tags_returned[idx:idx+tag_length]
idx += tag_length
symbol_type = unpack_uint(tags_returned[idx:idx+2])
idx += 2
count += 1
self._tag_list.append({'instance_id': instance,
'tag_name': tag_name,
'symbol_type': symbol_type})
except Exception as e:
raise DataError(e)
if status == SUCCESS:
self._last_instance = -1
elif status == 0x06:
self._last_instance = instance + 1
else:
self._status = (1, 'unknown status during _parse_tag_list')
self._last_instance = -1
def _parse_structure_makeup_attributes(self, start_tag_ptr, status):
""" extract the tags list from the message received
:param start_tag_ptr: The point in the message string where the tag list begin
:param status: The status of the message receives
"""
self._buffer = {}
if status != SUCCESS:
self._buffer['Error'] = status
return
attribute = self._reply[start_tag_ptr:]
idx = 4
try:
if unpack_uint(attribute[idx:idx + 2]) == SUCCESS:
idx += 2
self._buffer['object_definition_size'] = unpack_dint(attribute[idx:idx + 4])
else:
self._buffer['Error'] = 'object_definition Error'
return
idx += 6
if unpack_uint(attribute[idx:idx + 2]) == SUCCESS:
idx += 2
self._buffer['structure_size'] = unpack_dint(attribute[idx:idx + 4])
else:
self._buffer['Error'] = 'structure Error'
return
idx += 6
if unpack_uint(attribute[idx:idx + 2]) == SUCCESS:
idx += 2
self._buffer['member_count'] = unpack_uint(attribute[idx:idx + 2])
else:
self._buffer['Error'] = 'member_count Error'
return
idx += 4
if unpack_uint(attribute[idx:idx + 2]) == SUCCESS:
idx += 2
self._buffer['structure_handle'] = unpack_uint(attribute[idx:idx + 2])
else:
self._buffer['Error'] = 'structure_handle Error'
return
return self._buffer
except Exception as e:
raise DataError(e)
def _parse_template(self, start_tag_ptr, status):
""" extract the tags list from the message received
:param start_tag_ptr: The point in the message string where the tag list begin
:param status: The status of the message receives
"""
tags_returned = self._reply[start_tag_ptr:]
bytes_received = len(tags_returned)
self._buffer += tags_returned
if status == SUCCESS:
self._get_template_in_progress = False
elif status == 0x06:
self._byte_offset += bytes_received
else:
self._status = (1, 'unknown status {0} during _parse_template'.format(status))
self.logger.warning(self._status)
self._last_instance = -1
def _parse_fragment(self, start_ptr, status):
""" parse the fragment returned by a fragment service.
:param start_ptr: Where the fragment start within the replay
:param status: status field used to decide if keep parsing or stop
"""
try:
data_type = unpack_uint(self._reply[start_ptr:start_ptr+2])
fragment_returned = self._reply[start_ptr+2:]
except Exception as e:
raise DataError(e)
fragment_returned_length = len(fragment_returned)
idx = 0
while idx < fragment_returned_length:
try:
typ = I_DATA_TYPE[data_type]
value = UNPACK_DATA_FUNCTION[typ](fragment_returned[idx:idx+DATA_FUNCTION_SIZE[typ]])
idx += DATA_FUNCTION_SIZE[typ]
except Exception as e:
raise DataError(e)
self._tag_list.append((self._last_position, value))
self._last_position += 1
if status == SUCCESS:
self._byte_offset = -1
elif status == 0x06:
self._byte_offset += fragment_returned_length
else:
self._status = (2, 'unknown status during _parse_fragment')
self._byte_offset = -1
def _parse_multiple_request_read(self, tags):
""" parse the message received from a multi request read:
For each tag parsed, the information extracted includes the tag name, the value read and the data type.
That information is appended to the tag list as a tuple
:return: the tag list
"""
offset = 50
position = 50
try:
number_of_service_replies = unpack_uint(self._reply[offset:offset+2])
tag_list = []
for index in range(number_of_service_replies):
position += 2
start = offset + unpack_uint(self._reply[position:position+2])
general_status = unpack_usint(self._reply[start+2:start+3])
if general_status == 0:
data_type = unpack_uint(self._reply[start+4:start+6])
value_begin = start + 6
value_end = value_begin + DATA_FUNCTION_SIZE[I_DATA_TYPE[data_type]]
value = self._reply[value_begin:value_end]
self._last_tag_read = (tags[index], UNPACK_DATA_FUNCTION[I_DATA_TYPE[data_type]](value),
I_DATA_TYPE[data_type])
else:
self._last_tag_read = (tags[index], None, None)
tag_list.append(self._last_tag_read)
return tag_list
except Exception as e:
raise DataError(e)
def _parse_multiple_request_write(self, tags):
""" parse the message received from a multi request writ:
For each tag parsed, the information extracted includes the tag name and the status of the writing.
That information is appended to the tag list as a tuple
:return: the tag list
"""
offset = 50
position = 50
try:
number_of_service_replies = unpack_uint(self._reply[offset:offset+2])
tag_list = []
for index in range(number_of_service_replies):
position += 2
start = offset + unpack_uint(self._reply[position:position+2])
general_status = unpack_usint(self._reply[start+2:start+3])
if general_status == 0:
self._last_tag_write = (tags[index] + ('GOOD',))
else:
self._last_tag_write = (tags[index] + ('BAD',))
tag_list.append(self._last_tag_write)
return tag_list
except Exception as e:
raise DataError(e)
def _check_reply(self):
""" check the replayed message for error
"""
self._more_packets_available = False
try:
if self._reply is None:
self._status = (3, '%s without reply' % REPLAY_INFO[unpack_dint(self._message[:2])])
return False
# Get the type of command
typ = unpack_uint(self._reply[:2])
# Encapsulation status check
if unpack_dint(self._reply[8:12]) != SUCCESS:
self._status = (3, "{0} reply status:{1}".format(REPLAY_INFO[typ],
SERVICE_STATUS[unpack_dint(self._reply[8:12])]))
return False
# Command Specific Status check
if typ == unpack_uint(ENCAPSULATION_COMMAND["send_rr_data"]):
status = unpack_usint(self._reply[42:43])
if status != SUCCESS:
self._status = (3, "send_rr_data reply:{0} - Extend status:{1}".format(
SERVICE_STATUS[status], get_extended_status(self._reply, 42)))
return False
else:
return True
elif typ == unpack_uint(ENCAPSULATION_COMMAND["send_unit_data"]):
status = unpack_usint(self._reply[48:49])
if unpack_usint(self._reply[46:47]) == I_TAG_SERVICES_REPLY["Read Tag Fragmented"]:
self._parse_fragment(50, status)
return True
if unpack_usint(self._reply[46:47]) == I_TAG_SERVICES_REPLY["Get Instance Attributes List"]:
self._parse_instance_attribute_list(50, status)
return True
if unpack_usint(self._reply[46:47]) == I_TAG_SERVICES_REPLY["Get Attributes"]:
self._parse_structure_makeup_attributes(50, status)
return True
if unpack_usint(self._reply[46:47]) == I_TAG_SERVICES_REPLY["Read Template"] and \
self._get_template_in_progress:
self._parse_template(50, status)
return True
if status == 0x06:
self._status = (3, "Insufficient Packet Space")
self._more_packets_available = True
elif status != SUCCESS:
self._status = (3, "send_unit_data reply:{0} - Extend status:{1}".format(
SERVICE_STATUS[status], get_extended_status(self._reply, 48)))
return False
else:
return True
return True
except Exception as e:
raise DataError(e)
def read_tag(self, tag):
""" read tag from a connected plc
Possible combination can be passed to this method:
- ('Counts') a single tag name
- (['ControlWord']) a list with one tag or many
- (['parts', 'ControlWord', 'Counts'])
At the moment there is not a strong validation for the argument passed. The user should verify
the correctness of the format passed.
:return: None is returned in case of error otherwise the tag list is returned
"""
multi_requests = False
if isinstance(tag, list):
multi_requests = True
if not self._target_is_connected:
if not self.forward_open():
self._status = (6, "Target did not connected. read_tag will not be executed.")
self.logger.warning(self._status)
raise Error("Target did not connected. read_tag will not be executed.")
# multi_requests = False
if multi_requests:
rp_list = []
for t in tag:
rp = create_tag_rp(t, multi_requests=True)
if rp is None:
self._status = (6, "Cannot create tag {0} request packet. read_tag will not be executed.".format(tag))
raise DataError("Cannot create tag {0} request packet. read_tag will not be executed.".format(tag))
else:
rp_list.append(chr(TAG_SERVICES_REQUEST['Read Tag']) + rp + pack_uint(1))
message_request = build_multiple_service(rp_list, Base._get_sequence())
else:
rp = create_tag_rp(tag)
if rp is None:
self._status = (6, "Cannot create tag {0} request packet. read_tag will not be executed.".format(tag))
return None
else:
# Creating the Message Request Packet
message_request = [
pack_uint(Base._get_sequence()),
chr(TAG_SERVICES_REQUEST['Read Tag']), # the Request Service
chr(len(rp) / 2), # the Request Path Size length in word
rp, # the request path
pack_uint(1)
]
if self.send_unit_data(
build_common_packet_format(
DATA_ITEM['Connected'],
''.join(message_request),
ADDRESS_ITEM['Connection Based'],
addr_data=self._target_cid,
)) is None:
raise DataError("send_unit_data returned not valid data")
if multi_requests:
return self._parse_multiple_request_read(tag)
else:
# Get the data type
data_type = unpack_uint(self._reply[50:52])
# print I_DATA_TYPE[data_type]
try:
return UNPACK_DATA_FUNCTION[I_DATA_TYPE[data_type]](self._reply[52:]), I_DATA_TYPE[data_type]
except Exception as e:
raise DataError(e)
def read_array(self, tag, counts):
""" read array of atomic data type from a connected plc
At the moment there is not a strong validation for the argument passed. The user should verify
the correctness of the format passed.
:param tag: the name of the tag to read
:param counts: the number of element to read
:return: None is returned in case of error otherwise the tag list is returned
"""
if not self._target_is_connected:
if not self.forward_open():
self._status = (7, "Target did not connected. read_tag will not be executed.")
self.logger.warning(self._status)
raise Error("Target did not connected. read_tag will not be executed.")
self._byte_offset = 0
self._last_position = 0
self._tag_list = []
while self._byte_offset != -1:
rp = create_tag_rp(tag)
if rp is None:
self._status = (7, "Cannot create tag {0} request packet. read_tag will not be executed.".format(tag))
return None
else:
# Creating the Message Request Packet
message_request = [
pack_uint(Base._get_sequence()),
chr(TAG_SERVICES_REQUEST["Read Tag Fragmented"]), # the Request Service
chr(len(rp) / 2), # the Request Path Size length in word
rp, # the request path
pack_uint(counts),
pack_dint(self._byte_offset)
]
if self.send_unit_data(
build_common_packet_format(
DATA_ITEM['Connected'],
''.join(message_request),
ADDRESS_ITEM['Connection Based'],
addr_data=self._target_cid,
)) is None:
raise DataError("send_unit_data returned not valid data")
return self._tag_list
def write_tag(self, tag, value=None, typ=None):
""" write tag/tags from a connected plc
Possible combination can be passed to this method:
- ('tag name', Value, data type) as single parameters or inside a tuple
- ([('tag name', Value, data type), ('tag name2', Value, data type)]) as array of tuples
At the moment there is not a strong validation for the argument passed. The user should verify
the correctness of the format passed.
The type accepted are:
- BOOL
- SINT
- INT'
- DINT
- REAL
- LINT
- BYTE
- WORD
- DWORD
- LWORD
:param tag: tag name, or an array of tuple containing (tag name, value, data type)
:param value: the value to write or none if tag is an array of tuple or a tuple
:param typ: the type of the tag to write or none if tag is an array of tuple or a tuple
:return: None is returned in case of error otherwise the tag list is returned
"""
multi_requests = False
if isinstance(tag, list):
multi_requests = True
if not self._target_is_connected:
if not self.forward_open():
self._status = (8, "Target did not connected. write_tag will not be executed.")
self.logger.warning(self._status)
raise Error("Target did not connected. write_tag will not be executed.")
if multi_requests:
rp_list = []
tag_to_remove = []
idx = 0
for name, value, typ in tag:
# Create the request path to wrap the tag name
rp = create_tag_rp(name, multi_requests=True)
if rp is None:
self._status = (8, "Cannot create tag{0} req. packet. write_tag will not be executed".format(tag))
return None
else:
try: # Trying to add the rp to the request path list
val = PACK_DATA_FUNCTION[typ](value)
rp_list.append(
chr(TAG_SERVICES_REQUEST['Write Tag'])
+ rp
+ pack_uint(S_DATA_TYPE[typ])
+ pack_uint(1)
+ val
)
idx += 1
except (LookupError, struct.error) as e:
self._status = (8, "Tag:{0} type:{1} removed from write list. Error:{2}.".format(name, typ, e))
# The tag in idx position need to be removed from the rp list because has some kind of error
tag_to_remove.append(idx)
# Remove the tags that have not been inserted in the request path list
for position in tag_to_remove:
del tag[position]
# Create the message request
message_request = build_multiple_service(rp_list, Base._get_sequence())
else:
if isinstance(tag, tuple):
name, value, typ = tag
else:
name = tag
rp = create_tag_rp(name)
if rp is None:
self._status = (8, "Cannot create tag {0} request packet. write_tag will not be executed.".format(tag))
self.logger.warning(self._status)
return None
else:
# Creating the Message Request Packet
message_request = [
pack_uint(Base._get_sequence()),
chr(TAG_SERVICES_REQUEST["Write Tag"]), # the Request Service
chr(len(rp) / 2), # the Request Path Size length in word
rp, # the request path
pack_uint(S_DATA_TYPE[typ]), # data type
pack_uint(1), # Add the number of tag to write
PACK_DATA_FUNCTION[typ](value)
]
ret_val = self.send_unit_data(
build_common_packet_format(
DATA_ITEM['Connected'],
''.join(message_request),
ADDRESS_ITEM['Connection Based'],
addr_data=self._target_cid,
)
)
if multi_requests:
return self._parse_multiple_request_write(tag)
else:
if ret_val is None:
raise DataError("send_unit_data returned not valid data")
return ret_val
def write_array(self, tag, data_type, values):
""" write array of atomic data type from a connected plc
At the moment there is not a strong validation for the argument passed. The user should verify
the correctness of the format passed.
:param tag: the name of the tag to read
:param data_type: the type of tag to write
:param values: the array of values to write
"""
if not isinstance(values, list):
self._status = (9, "A list of tags must be passed to write_array.")
self.logger.warning(self._status)
raise DataError("A list of tags must be passed to write_array.")
if not self._target_is_connected:
if not self.forward_open():
self._status = (9, "Target did not connected. write_array will not be executed.")
self.logger.warning(self._status)
raise Error("Target did not connected. write_array will not be executed.")
array_of_values = ""
byte_size = 0
byte_offset = 0
for i, value in enumerate(values):
array_of_values += PACK_DATA_FUNCTION[data_type](value)
byte_size += DATA_FUNCTION_SIZE[data_type]
if byte_size >= 450 or i == len(values)-1:
# create the message and send the fragment
rp = create_tag_rp(tag)
if rp is None:
self._status = (9, "Cannot create tag {0} request packet. \
write_array will not be executed.".format(tag))
return None
else:
# Creating the Message Request Packet
message_request = [
pack_uint(Base._get_sequence()),
chr(TAG_SERVICES_REQUEST["Write Tag Fragmented"]), # the Request Service
chr(len(rp) / 2), # the Request Path Size length in word
rp, # the request path
pack_uint(S_DATA_TYPE[data_type]), # Data type to write
pack_uint(len(values)), # Number of elements to write
pack_dint(byte_offset),
array_of_values # Fragment of elements to write
]
byte_offset += byte_size
if self.send_unit_data(
build_common_packet_format(
DATA_ITEM['Connected'],
''.join(message_request),
ADDRESS_ITEM['Connection Based'],
addr_data=self._target_cid,
)) is None:
raise DataError("send_unit_data returned not valid data")
array_of_values = ""
byte_size = 0
def _get_instance_attribute_list_service(self):
""" Step 1: Finding user-created controller scope tags in a Logix5000 controller
This service returns instance IDs for each created instance of the symbol class, along with a list
of the attribute data associated with the requested attribute
"""
try:
if not self._target_is_connected:
if not self.forward_open():
self._status = (10, "Target did not connected. get_tag_list will not be executed.")
self.logger.warning(self._status)
raise Error("Target did not connected. get_tag_list will not be executed.")
self._last_instance = 0
self._get_template_in_progress = True
while self._last_instance != -1:
# Creating the Message Request Packet
message_request = [
pack_uint(Base._get_sequence()),
chr(TAG_SERVICES_REQUEST['Get Instance Attributes List']), # STEP 1
# the Request Path Size length in word
chr(3),
# Request Path ( 20 6B 25 00 Instance )
CLASS_ID["8-bit"], # Class id = 20 from spec 0x20
CLASS_CODE["Symbol Object"], # Logical segment: Symbolic Object 0x6B
INSTANCE_ID["16-bit"], # Instance Segment: 16 Bit instance 0x25
'\x00',
pack_uint(self._last_instance), # The instance
# Request Data
pack_uint(2), # Number of attributes to retrieve
pack_uint(1), # Attribute 1: Symbol name
pack_uint(2) # Attribute 2: Symbol type
]
if self.send_unit_data(
build_common_packet_format(
DATA_ITEM['Connected'],
''.join(message_request),
ADDRESS_ITEM['Connection Based'],
addr_data=self._target_cid,
)) is None:
raise DataError("send_unit_data returned not valid data")
self._get_template_in_progress = False
except Exception as e:
raise DataError(e)
def _get_structure_makeup(self, instance_id):
"""
get the structure makeup for a specific structure
"""
if not self._target_is_connected:
if not self.forward_open():
self._status = (10, "Target did not connected. get_tag_list will not be executed.")
self.logger.warning(self._status)
raise Error("Target did not connected. get_tag_list will not be executed.")
message_request = [
pack_uint(self._get_sequence()),
chr(TAG_SERVICES_REQUEST['Get Attributes']),
chr(3), # Request Path ( 20 6B 25 00 Instance )
CLASS_ID["8-bit"], # Class id = 20 from spec 0x20
CLASS_CODE["Template Object"], # Logical segment: Template Object 0x6C
INSTANCE_ID["16-bit"], # Instance Segment: 16 Bit instance 0x25
'\x00',
pack_uint(instance_id),
pack_uint(4), # Number of attributes
pack_uint(4), # Template Object Definition Size UDINT
pack_uint(5), # Template Structure Size UDINT
pack_uint(2), # Template Member Count UINT
pack_uint(1) # Structure Handle We can use this to read and write UINT
]
if self.send_unit_data(
build_common_packet_format(DATA_ITEM['Connected'],
''.join(message_request), ADDRESS_ITEM['Connection Based'],
addr_data=self._target_cid,)) is None:
raise DataError("send_unit_data returned not valid data")
return self._buffer
def _read_template(self, instance_id, object_definition_size):
""" get a list of the tags in the plc
"""
if not self._target_is_connected:
if not self.forward_open():
self._status = (10, "Target did not connected. get_tag_list will not be executed.")
self.logger.warning(self._status)
raise Error("Target did not connected. get_tag_list will not be executed.")
self._byte_offset = 0
self._buffer = ""
self._get_template_in_progress = True
try:
while self._get_template_in_progress:
# Creating the Message Request Packet
message_request = [
pack_uint(self._get_sequence()),
chr(TAG_SERVICES_REQUEST['Read Template']),
chr(3), # Request Path ( 20 6B 25 00 Instance )
CLASS_ID["8-bit"], # Class id = 20 from spec 0x20
CLASS_CODE["Template Object"], # Logical segment: Template Object 0x6C
INSTANCE_ID["16-bit"], # Instance Segment: 16 Bit instance 0x25
'\x00',
pack_uint(instance_id),
pack_dint(self._byte_offset), # Offset
pack_uint(((object_definition_size * 4)-23) - self._byte_offset)
]
if not self.send_unit_data(
build_common_packet_format(DATA_ITEM['Connected'], ''.join(message_request),
ADDRESS_ITEM['Connection Based'], addr_data=self._target_cid,)):
raise DataError("send_unit_data returned not valid data")
self._get_template_in_progress = False
return self._buffer
except Exception as e:
raise DataError(e)
def _isolating_user_tag(self):
try:
lst = self._tag_list
self._tag_list = []
for tag in lst:
if tag['tag_name'].find(':') != -1 or tag['tag_name'].find('__') != -1:
continue
if tag['symbol_type'] & 0b0001000000000000:
continue
dimension = (tag['symbol_type'] & 0b0110000000000000) >> 13
template_instance_id = tag['symbol_type'] & 0b0000111111111111
if tag['symbol_type'] & 0b1000000000000000:
tag_type = 'struct'
data_type = 'user-created'
self._tag_list.append({'instance_id': tag['instance_id'],
'template_instance_id': template_instance_id,
'tag_name': tag['tag_name'],
'dim': dimension,
'tag_type': tag_type,
'data_type': data_type,
'template': {},
'udt': {}})
else:
tag_type = 'atomic'
data_type = I_DATA_TYPE[template_instance_id]
self._tag_list.append({'instance_id': tag['instance_id'],
'tag_name': tag['tag_name'],
'dim': dimension,
'tag_type': tag_type,
'data_type': data_type})
except Exception as e:
raise DataError(e)
def _parse_udt_raw(self, tag):
try:
buff = self._read_template(tag['template_instance_id'], tag['template']['object_definition_size'])
member_count = tag['template']['member_count']
names = buff.split('\00')
lst = []
tag['udt']['name'] = 'Not a user-defined structure'
for name in names:
if len(name) > 1:
if name.find(';') != -1:
tag['udt']['name'] = name[:name.find(';')]
elif name.find('ZZZZZZZZZZ') != -1:
continue
elif name.isalpha():
lst.append(name)
else:
continue
tag['udt']['internal_tags'] = lst
type_list = []
for i in xrange(member_count):
# skip member 1
if i != 0:
array_size = unpack_uint(buff[:2])
try:
data_type = I_DATA_TYPE[unpack_uint(buff[2:4])]
except Exception:
data_type = "None"
offset = unpack_dint(buff[4:8])
type_list.append((array_size, data_type, offset))
buff = buff[8:]
tag['udt']['data_type'] = type_list
except Exception as e:
raise DataError(e)
def get_tag_list(self):
self._tag_list = []
# Step 1
self._get_instance_attribute_list_service()
# Step 2
self._isolating_user_tag()
# Step 3
for tag in self._tag_list:
if tag['tag_type'] == 'struct':
tag['template'] = self._get_structure_makeup(tag['template_instance_id'])
for idx, tag in enumerate(self._tag_list):
# print (tag)
if tag['tag_type'] == 'struct':
self._parse_udt_raw(tag)
# Step 4
return self._tag_list
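For reference, typical application-side use of this driver, mirroring the calls alarm.py makes (the controller address is a placeholder, and the import path assumes the pip-installed pycomm package rather than this vendored copy):

from pycomm.ab_comm.clx import Driver as ClxDriver

c = ClxDriver()
if c.open('192.168.1.10'):
    # a single read returns a (value, data_type) tuple, e.g. (42, 'DINT')
    print(c.read_tag('Counts'))
    # a multi-request read returns a list of (tag, value, data_type) tuples
    print(c.read_tag(['Counts', 'ControlWord']))
    # writes take tag name, value and data type
    c.write_tag('Counts', 0, 'DINT')
    c.close()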


@@ -0,0 +1,446 @@
# -*- coding: utf-8 -*-
#
# clx.py - Ethernet/IP Client for Rockwell PLCs
#
#
# Copyright (c) 2014 Agostino Ruscito <ruscito@gmail.com>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
from tag.micro800.pycomm_micro.cip.cip_base import *
from tag.micro800.pycomm_micro.common import setup_logger
import re
import logging
import math
def parse_tag(tag):
t = re.search(r"(?P<file_type>[CT])(?P<file_number>\d{1,3})"
r"(:)(?P<element_number>\d{1,3})"
r"(.)(?P<sub_element>ACC|PRE|EN|DN|TT|CU|CD|DN|OV|UN|UA)", tag, flags=re.IGNORECASE)
if t:
if (1 <= int(t.group('file_number')) <= 255) \
and (0 <= int(t.group('element_number')) <= 255):
return True, t.group(0), {'file_type': t.group('file_type').upper(),
'file_number': t.group('file_number'),
'element_number': t.group('element_number'),
'sub_element': PCCC_CT[t.group('sub_element').upper()],
'read_func': '\xa2',
'write_func': '\xab',
'address_field': 3}
t = re.search(r"(?P<file_type>[FBN])(?P<file_number>\d{1,3})"
r"(:)(?P<element_number>\d{1,3})"
r"(/(?P<sub_element>\d{1,2}))?",
tag, flags=re.IGNORECASE)
if t:
if t.group('sub_element') is not None:
if (1 <= int(t.group('file_number')) <= 255) \
and (0 <= int(t.group('element_number')) <= 255) \
and (0 <= int(t.group('sub_element')) <= 15):
return True, t.group(0), {'file_type': t.group('file_type').upper(),
'file_number': t.group('file_number'),
'element_number': t.group('element_number'),
'sub_element': t.group('sub_element'),
'read_func': '\xa2',
'write_func': '\xab',
'address_field': 3}
else:
if (1 <= int(t.group('file_number')) <= 255) \
and (0 <= int(t.group('element_number')) <= 255):
return True, t.group(0), {'file_type': t.group('file_type').upper(),
'file_number': t.group('file_number'),
'element_number': t.group('element_number'),
'sub_element': t.group('sub_element'),
'read_func': '\xa2',
'write_func': '\xab',
'address_field': 2}
t = re.search(r"(?P<file_type>[IO])(:)(?P<file_number>\d{1,3})"
r"(.)(?P<element_number>\d{1,3})"
r"(/(?P<sub_element>\d{1,2}))?", tag, flags=re.IGNORECASE)
if t:
if t.group('sub_element') is not None:
if (0 <= int(t.group('file_number')) <= 255) \
and (0 <= int(t.group('element_number')) <= 255) \
and (0 <= int(t.group('sub_element')) <= 15):
return True, t.group(0), {'file_type': t.group('file_type').upper(),
'file_number': t.group('file_number'),
'element_number': t.group('element_number'),
'sub_element': t.group('sub_element'),
'read_func': '\xa2',
'write_func': '\xab',
'address_field': 3}
else:
if (0 <= int(t.group('file_number')) <= 255) \
and (0 <= int(t.group('element_number')) <= 255):
return True, t.group(0), {'file_type': t.group('file_type').upper(),
'file_number': t.group('file_number'),
'element_number': t.group('element_number'),
'read_func': '\xa2',
'write_func': '\xab',
'address_field': 2}
t = re.search(r"(?P<file_type>S)"
r"(:)(?P<element_number>\d{1,3})"
r"(/(?P<sub_element>\d{1,2}))?", tag, flags=re.IGNORECASE)
if t:
if t.group('sub_element') is not None:
if (0 <= int(t.group('element_number')) <= 255) \
and (0 <= int(t.group('sub_element')) <= 15):
return True, t.group(0), {'file_type': t.group('file_type').upper(),
'file_number': '2',
'element_number': t.group('element_number'),
'sub_element': t.group('sub_element'),
'read_func': '\xa2',
'write_func': '\xab',
'address_field': 3}
else:
if 0 <= int(t.group('element_number')) <= 255:
return True, t.group(0), {'file_type': t.group('file_type').upper(),
'file_number': '2',
'element_number': t.group('element_number'),
'read_func': '\xa2',
'write_func': '\xab',
'address_field': 2}
t = re.search(r"(?P<file_type>B)(?P<file_number>\d{1,3})"
r"(/)(?P<element_number>\d{1,4})",
tag, flags=re.IGNORECASE)
if t:
if (1 <= int(t.group('file_number')) <= 255) \
and (0 <= int(t.group('element_number')) <= 4095):
bit_position = int(t.group('element_number'))
element_number = bit_position / 16
sub_element = bit_position - (element_number * 16)
return True, t.group(0), {'file_type': t.group('file_type').upper(),
'file_number': t.group('file_number'),
'element_number': element_number,
'sub_element': sub_element,
'read_func': '\xa2',
'write_func': '\xab',
'address_field': 3}
return False, tag
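# Illustrative results, not part of the original file, derived from the regexes above:
#   parse_tag('N7:0')    -> (True, 'N7:0', {'file_type': 'N', 'file_number': '7',
#                            'element_number': '0', ..., 'address_field': 2})
#   parse_tag('B3/16')   -> bit 16 of file B3, i.e. element_number 1, sub_element 0
#   parse_tag('garbage') -> (False, 'garbage')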
class Driver(Base):
"""
SLC/PLC_5 Implementation
"""
def __init__(self, debug=False, filename=None):
if debug:
super(Driver, self).__init__(setup_logger('ab_comm.slc', logging.DEBUG, filename))
else:
super(Driver, self).__init__(setup_logger('ab_comm.slc', logging.INFO, filename))
self.__version__ = '0.1'
self._last_sequence = 0
def _check_reply(self):
"""
check the replayed message for error
"""
self._more_packets_available = False
try:
if self._reply is None:
self._status = (3, '%s without reply' % REPLAY_INFO[unpack_dint(self._message[:2])])
return False
# Get the type of command
typ = unpack_uint(self._reply[:2])
# Encapsulation status check
if unpack_dint(self._reply[8:12]) != SUCCESS:
self._status = (3, "{0} reply status:{1}".format(REPLAY_INFO[typ],
SERVICE_STATUS[unpack_dint(self._reply[8:12])]))
return False
# Command Specific Status check
if typ == unpack_uint(ENCAPSULATION_COMMAND["send_rr_data"]):
status = unpack_usint(self._reply[42:43])
if status != SUCCESS:
self._status = (3, "send_rr_data reply:{0} - Extend status:{1}".format(
SERVICE_STATUS[status], get_extended_status(self._reply, 42)))
return False
else:
return True
elif typ == unpack_uint(ENCAPSULATION_COMMAND["send_unit_data"]):
status = unpack_usint(self._reply[48:49])
if unpack_usint(self._reply[46:47]) == I_TAG_SERVICES_REPLY["Read Tag Fragmented"]:
self._parse_fragment(50, status)
return True
if unpack_usint(self._reply[46:47]) == I_TAG_SERVICES_REPLY["Get Instance Attributes List"]:
self._parse_tag_list(50, status)
return True
if status == 0x06:
self._status = (3, "Insufficient Packet Space")
self._more_packets_available = True
elif status != SUCCESS:
self._status = (3, "send_unit_data reply:{0} - Extend status:{1}".format(
SERVICE_STATUS[status], get_extended_status(self._reply, 48)))
return False
else:
return True
return True
except Exception as e:
raise DataError(e)
def read_tag(self, tag, n=1):
""" read tag from a connected plc
Possible combination can be passed to this method:
print c.read_tag('F8:0', 3) return a list of 3 registers starting from F8:0
print c.read_tag('F8:0') return one value
It is possible to read status bit
:return: None is returned in case of error
"""
res = parse_tag(tag)
if not res[0]:
self._status = (1000, "Error parsing the tag passed to read_tag({0},{1})".format(tag, n))
self.logger.warning(self._status)
raise DataError("Error parsing the tag passed to read_tag({0},{1})".format(tag, n))
bit_read = False
bit_position = 0
sub_element = 0
if int(res[2]['address_field'] == 3):
bit_read = True
bit_position = int(res[2]['sub_element'])
if not self._target_is_connected:
if not self.forward_open():
self._status = (5, "Target did not connected. read_tag will not be executed.")
self.logger.warning(self._status)
raise Error("Target did not connected. read_tag will not be executed.")
data_size = PCCC_DATA_SIZE[res[2]['file_type']]
# Creating the Message Request Packet
self._last_sequence = pack_uint(Base._get_sequence())
message_request = [
self._last_sequence,
'\x4b',
'\x02',
CLASS_ID["8-bit"],
PATH["PCCC"],
'\x07',
self.attribs['vid'],
self.attribs['vsn'],
'\x0f',
'\x00',
self._last_sequence[1],
self._last_sequence[0],
res[2]['read_func'],
pack_usint(data_size * n),
pack_usint(int(res[2]['file_number'])),
PCCC_DATA_TYPE[res[2]['file_type']],
pack_usint(int(res[2]['element_number'])),
pack_usint(sub_element)
]
self.logger.debug("SLC read_tag({0},{1})".format(tag, n))
if self.send_unit_data(
build_common_packet_format(
DATA_ITEM['Connected'],
''.join(message_request),
ADDRESS_ITEM['Connection Based'],
addr_data=self._target_cid,)):
sts = int(unpack_usint(self._reply[58]))
try:
if sts != 0:
sts_txt = PCCC_ERROR_CODE[sts]
self._status = (1000, "Error({0}) returned from read_tag({1},{2})".format(sts_txt, tag, n))
self.logger.warning(self._status)
raise DataError("Error({0}) returned from read_tag({1},{2})".format(sts_txt, tag, n))
new_value = 61
if bit_read:
if res[2]['file_type'] == 'T' or res[2]['file_type'] == 'C':
if bit_position == PCCC_CT['PRE']:
return UNPACK_PCCC_DATA_FUNCTION[res[2]['file_type']](
self._reply[new_value+2:new_value+2+data_size])
elif bit_position == PCCC_CT['ACC']:
return UNPACK_PCCC_DATA_FUNCTION[res[2]['file_type']](
self._reply[new_value+4:new_value+4+data_size])
tag_value = UNPACK_PCCC_DATA_FUNCTION[res[2]['file_type']](
self._reply[new_value:new_value+data_size])
return get_bit(tag_value, bit_position)
else:
values_list = []
while len(self._reply[new_value:]) >= data_size:
values_list.append(
UNPACK_PCCC_DATA_FUNCTION[res[2]['file_type']](self._reply[new_value:new_value+data_size])
)
new_value = new_value+data_size
if len(values_list) > 1:
return values_list
else:
return values_list[0]
except Exception as e:
self._status = (1000, "Error({0}) parsing the data returned from read_tag({1},{2})".format(e, tag, n))
self.logger.warning(self._status)
raise DataError("Error({0}) parsing the data returned from read_tag({1},{2})".format(e, tag, n))
else:
raise DataError("send_unit_data returned not valid data")
def write_tag(self, tag, value):
""" write tag from a connected plc
Possible combination can be passed to this method:
c.write_tag('N7:0', [-30, 32767, -32767])
c.write_tag('N7:0', 21)
c.read_tag('N7:0', 10)
It is not possible to write status bit
:return: None is returned in case of error
"""
res = parse_tag(tag)
if not res[0]:
self._status = (1000, "Error parsing the tag passed to read_tag({0},{1})".format(tag, value))
self.logger.warning(self._status)
raise DataError("Error parsing the tag passed to read_tag({0},{1})".format(tag, value))
if isinstance(value, list) and int(res[2]['address_field'] == 3):
self._status = (1000, "Function's parameters error. read_tag({0},{1})".format(tag, value))
self.logger.warning(self._status)
raise DataError("Function's parameters error. read_tag({0},{1})".format(tag, value))
if isinstance(value, list) and int(res[2]['address_field'] == 3):
self._status = (1000, "Function's parameters error. read_tag({0},{1})".format(tag, value))
self.logger.warning(self._status)
raise DataError("Function's parameters error. read_tag({0},{1})".format(tag, value))
bit_field = False
bit_position = 0
sub_element = 0
if int(res[2]['address_field'] == 3):
bit_field = True
bit_position = int(res[2]['sub_element'])
values_list = ''
else:
values_list = '\xff\xff'
multi_requests = False
if isinstance(value, list):
multi_requests = True
if not self._target_is_connected:
if not self.forward_open():
self._status = (1000, "Target did not connected. write_tag will not be executed.")
self.logger.warning(self._status)
raise Error("Target did not connected. write_tag will not be executed.")
try:
n = 0
if multi_requests:
data_size = PCCC_DATA_SIZE[res[2]['file_type']]
for v in value:
values_list += PACK_PCCC_DATA_FUNCTION[res[2]['file_type']](v)
n += 1
else:
n = 1
if bit_field:
data_size = 2
if (res[2]['file_type'] == 'T' or res[2]['file_type'] == 'C') \
and (bit_position == PCCC_CT['PRE'] or bit_position == PCCC_CT['ACC']):
sub_element = bit_position
values_list = '\xff\xff' + PACK_PCCC_DATA_FUNCTION[res[2]['file_type']](value)
else:
sub_element = 0
if value > 0:
values_list = pack_uint(math.pow(2, bit_position)) + pack_uint(math.pow(2, bit_position))
else:
values_list = pack_uint(math.pow(2, bit_position)) + pack_uint(0)
else:
values_list += PACK_PCCC_DATA_FUNCTION[res[2]['file_type']](value)
data_size = PCCC_DATA_SIZE[res[2]['file_type']]
except Exception as e:
self._status = (1000, "Error({0}) packing the values to write to the"
"SLC write_tag({1},{2})".format(e, tag, value))
self.logger.warning(self._status)
raise DataError("Error({0}) packing the values to write to the "
"SLC write_tag({1},{2})".format(e, tag, value))
data_to_write = values_list
# Creating the Message Request Packet
self._last_sequence = pack_uint(Base._get_sequence())
message_request = [
self._last_sequence,
'\x4b',
'\x02',
CLASS_ID["8-bit"],
PATH["PCCC"],
'\x07',
self.attribs['vid'],
self.attribs['vsn'],
'\x0f',
'\x00',
self._last_sequence[1],
self._last_sequence[0],
res[2]['write_func'],
pack_usint(data_size * n),
pack_usint(int(res[2]['file_number'])),
PCCC_DATA_TYPE[res[2]['file_type']],
pack_usint(int(res[2]['element_number'])),
pack_usint(sub_element)
]
self.logger.debug("SLC write_tag({0},{1})".format(tag, value))
if self.send_unit_data(
build_common_packet_format(
DATA_ITEM['Connected'],
''.join(message_request) + data_to_write,
ADDRESS_ITEM['Connection Based'],
addr_data=self._target_cid,)):
sts = int(unpack_usint(self._reply[58]))
try:
if sts != 0:
sts_txt = PCCC_ERROR_CODE[sts]
self._status = (1000, "Error({0}) returned from SLC write_tag({1},{2})".format(sts_txt, tag, value))
self.logger.warning(self._status)
raise DataError("Error({0}) returned from SLC write_tag({1},{2})".format(sts_txt, tag, value))
return True
except Exception as e:
self._status = (1000, "Error({0}) parsing the data returned from "
"SLC write_tag({1},{2})".format(e, tag, value))
self.logger.warning(self._status)
raise DataError("Error({0}) parsing the data returned from "
"SLC write_tag({1},{2})".format(e, tag, value))
else:
raise DataError("send_unit_data returned not valid data")


@@ -0,0 +1 @@
__author__ = 'agostino'


@@ -0,0 +1,827 @@
# -*- coding: utf-8 -*-
#
# cip_base.py - A set of classes methods and structures used to implement Ethernet/IP
#
#
# Copyright (c) 2014 Agostino Ruscito <ruscito@gmail.com>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
import struct
import socket
from os import getpid
from tag.micro800.pycomm_micro.cip.cip_const import *
from tag.micro800.pycomm_micro.common import PycommError
class CommError(PycommError):
pass
class DataError(PycommError):
pass
def pack_sint(n):
return struct.pack('b', n)
def pack_usint(n):
return struct.pack('B', n)
def pack_int(n):
"""pack 16 bit into 2 bytes little endian"""
return struct.pack('<h', n)
def pack_uint(n):
"""pack 16 bit into 2 bytes little endian"""
# print("N: {0}".format(n))
return struct.pack('<H', n)
def pack_dint(n):
"""pack 32 bit into 4 bytes little endian"""
return struct.pack('<i', n)
def pack_real(r):
"""pack a float into 4 bytes little endian"""
return struct.pack('<f', r)
def pack_lint(l):
"""pack a 64-bit int into 8 bytes little endian"""
return struct.pack('<q', l)
def unpack_bool(st):
if int(struct.unpack('B', st[0])[0]) == 0:
return 0
return 1
def unpack_sint(st):
return int(struct.unpack('b', st[0])[0])
def unpack_usint(st):
return int(struct.unpack('B', st[0])[0])
def unpack_int(st):
"""unpack 2 bytes little endian to int"""
return int(struct.unpack('<h', st[0:2])[0])
def unpack_uint(st):
"""unpack 2 bytes little endian to int"""
return int(struct.unpack('<H', st[0:2])[0])
def unpack_dint(st):
"""unpack 4 bytes little endian to int"""
return int(struct.unpack('<i', st[0:4])[0])
def unpack_real(st):
"""unpack 4 bytes little endian to float"""
return float(struct.unpack('<f', st[0:4])[0])
def unpack_lreal(st):
"""unpack 8 bytes little endian to float"""
return float(struct.unpack('<d', st[0:8])[0])
def unpack_lint(st):
"""unpack 8 bytes little endian to int"""
return int(struct.unpack('<q', st[0:8])[0])
def get_bit(value, idx):
""":returns value of bit at position idx"""
return (value & (1 << idx)) != 0
PACK_DATA_FUNCTION = {
'BOOL': pack_sint,
'SINT': pack_sint, # Signed 8-bit integer
'INT': pack_int, # Signed 16-bit integer
'UINT': pack_uint, # Unsigned 16-bit integer
'USINT': pack_usint, # Unsigned 8-bit integer
'DINT': pack_dint, # Signed 32-bit integer
'REAL': pack_real, # 32-bit floating point
'LREAL': pack_real, # 32-bit floating point
'LINT': pack_lint,
'BYTE': pack_sint, # byte string 8-bits
'WORD': pack_uint, # byte string 16-bits
'DWORD': pack_dint, # byte string 32-bits
'LWORD': pack_lint # byte string 64-bits
}
UNPACK_DATA_FUNCTION = {
'BOOL': unpack_bool,
'SINT': unpack_sint, # Signed 8-bit integer
'INT': unpack_int, # Signed 16-bit integer
'UINT': unpack_uint, # Unsigned 16-bit
'USINT': unpack_usint, # Unsigned 8-bit integer
'DINT': unpack_dint, # Signed 32-bit integer
'UDINT': unpack_dint, # Signed 32-bit integer
'REAL': unpack_real, # 32-bit floating point,
'LREAL': unpack_lreal, # 32-bit floating point,
'LINT': unpack_lint,
'BYTE': unpack_sint, # byte string 8-bits
'WORD': unpack_uint, # byte string 16-bits
'DWORD': unpack_dint, # byte string 32-bits
'LWORD': unpack_lint # byte string 64-bits
}
DATA_FUNCTION_SIZE = {
'BOOL': 1,
'SINT': 1, # Signed 8-bit integer
'INT': 2, # Signed 16-bit integer
'UINT': 2, # Unsigned 16-bit integer
'DINT': 4, # Signed 32-bit integer
'REAL': 4, # 32-bit floating point
'LINT': 8,
'BYTE': 1, # byte string 8-bits
'WORD': 2, # byte string 16-bits
'DWORD': 4, # byte string 32-bits
'LWORD': 8 # byte string 64-bits
}
UNPACK_PCCC_DATA_FUNCTION = {
'N': unpack_int,
'B': unpack_int,
'T': unpack_int,
'C': unpack_int,
'S': unpack_int,
'F': unpack_real,
'A': unpack_sint,
'R': unpack_dint,
'O': unpack_int,
'I': unpack_int
}
PACK_PCCC_DATA_FUNCTION = {
'N': pack_int,
'B': pack_int,
'T': pack_int,
'C': pack_int,
'S': pack_int,
'F': pack_real,
'A': pack_sint,
'R': pack_dint,
'O': pack_int,
'I': pack_int
}
def print_bytes_line(msg):
out = ''
for ch in msg:
out += "{:0>2x}".format(ord(ch))
return out
def print_bytes_msg(msg, info=''):
out = info
new_line = True
line = 0
column = 0
for idx, ch in enumerate(msg):
if new_line:
out += "\n({:0>4d}) ".format(line * 10)
new_line = False
out += "{:0>2x} ".format(ord(ch))
if column == 9:
new_line = True
column = 0
line += 1
else:
column += 1
return out
def get_extended_status(msg, start):
status = unpack_usint(msg[start:start+1])
# send_rr_data
# 42 General Status
# 43 Size of additional status
# 44..n additional status
# send_unit_data
# 48 General Status
# 49 Size of additional status
# 50..n additional status
extended_status_size = (unpack_usint(msg[start+1:start+2]))*2
extended_status = 0
if extended_status_size != 0:
# There is an additional status
if extended_status_size == 1:
extended_status = unpack_usint(msg[start+2:start+3])
elif extended_status_size == 2:
extended_status = unpack_uint(msg[start+2:start+4])
elif extended_status_size == 4:
extended_status = unpack_dint(msg[start+2:start+6])
else:
return 'Extended Status Size Unknown'
try:
return '{0}'.format(EXTEND_CODES[status][extended_status])
except LookupError:
return "Extended Status info not present"
def create_tag_rp(tag, multi_requests=False):
""" Create tag Request Packet
It returns the request packed wrapped around the tag passed.
If any error it returns none
"""
tags = tag.split('.')
rp = []
index = []
for tag in tags:
add_index = False
# Check if is an array tag
if tag.find('[') != -1:
# Remove the last square bracket
tag = tag[:len(tag)-1]
# Isolate the value inside bracket
inside_value = tag[tag.find('[')+1:]
# Now split the inside value in case part of multidimensional array
index = inside_value.split(',')
# Flag the existence of one o more index
add_index = True
# Get only the tag part
tag = tag[:tag.find('[')]
tag_length = len(tag)
# Create the request path
rp.append(EXTENDED_SYMBOL) # ANSI Ext. symbolic segment
rp.append(chr(tag_length)) # Length of the tag
# Add the tag to the Request path
for char in tag:
rp.append(char)
# Add pad byte because total length of Request path must be word-aligned
if tag_length % 2:
rp.append(PADDING_BYTE)
# Add any index
if add_index:
for idx in index:
val = int(idx)
if val <= 0xff:
rp.append(ELEMENT_ID["8-bit"])
rp.append(pack_usint(val))
elif val <= 0xffff:
rp.append(ELEMENT_ID["16-bit"]+PADDING_BYTE)
rp.append(pack_uint(val))
elif val <= 0xfffffffff:
rp.append(ELEMENT_ID["32-bit"]+PADDING_BYTE)
rp.append(pack_dint(val))
else:
# Cannot create a valid request packet
return None
# At this point the Request Path is completed,
if multi_requests:
request_path = chr(len(rp)/2) + ''.join(rp)
else:
request_path = ''.join(rp)
return request_path
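# Illustrative, not part of the original file: for a plain tag the request path is the
# extended-symbol segment, the name length and the name (padded to a word boundary);
# array indices add an element-id segment per dimension:
#   create_tag_rp('Counts')   -> EXTENDED_SYMBOL + chr(6) + 'Counts'
#   create_tag_rp('parts[5]') -> segment for 'parts' + pad byte + 8-bit element id 5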
def build_common_packet_format(message_type, message, addr_type, addr_data=None, timeout=10):
""" build_common_packet_format
It creates the common part for a CIP message. Check Volume 2 (page 2.22) of CIP specification for reference
"""
msg = pack_dint(0) # Interface Handle: shall be 0 for CIP
msg += pack_uint(timeout) # timeout
msg += pack_uint(2) # Item count: should be at list 2 (Address and Data)
msg += addr_type # Address Item Type ID
if addr_data is not None:
msg += pack_uint(len(addr_data)) # Address Item Length
msg += addr_data
else:
msg += pack_uint(0) # Address Item Length
msg += message_type # Data Type ID
msg += pack_uint(len(message)) # Data Item Length
msg += message
return msg
def build_multiple_service(rp_list, sequence=None):
mr = []
if sequence is not None:
mr.append(pack_uint(sequence))
mr.append(chr(TAG_SERVICES_REQUEST["Multiple Service Packet"])) # the Request Service
mr.append(pack_usint(2)) # the Request Path Size length in word
mr.append(CLASS_ID["8-bit"])
mr.append(CLASS_CODE["Message Router"])
mr.append(INSTANCE_ID["8-bit"])
mr.append(pack_usint(1)) # Instance 1
mr.append(pack_uint(len(rp_list))) # Number of service contained in the request
# Offset calculation
offset = (len(rp_list) * 2) + 2
for index, rp in enumerate(rp_list):
if index == 0:
mr.append(pack_uint(offset)) # Starting offset
else:
mr.append(pack_uint(offset))
offset += len(rp)
for rp in rp_list:
mr.append(rp)
return mr
def parse_multiple_request(message, tags, typ):
""" parse_multi_request
This function should be used to parse the message replayed to a multi request service wrapped around the
send_unit_data message.
:param message: the full message returned from the PLC
:param tags: The list of tags to be read
:param typ: to specify if multi request service READ or WRITE
:return: a list of tuples in the format [ (tag name, value, data type), ( tag name, value, data type) ].
In case of error the tuple will be (tag name, None, None)
"""
offset = 50
position = 50
number_of_service_replies = unpack_uint(message[offset:offset+2])
tag_list = []
for index in range(number_of_service_replies):
position += 2
start = offset + unpack_uint(message[position:position+2])
general_status = unpack_usint(message[start+2:start+3])
if general_status == 0:
if typ == "READ":
data_type = unpack_uint(message[start+4:start+6])
try:
value_begin = start + 6
value_end = value_begin + DATA_FUNCTION_SIZE[I_DATA_TYPE[data_type]]
value = message[value_begin:value_end]
tag_list.append((tags[index],
UNPACK_DATA_FUNCTION[I_DATA_TYPE[data_type]](value),
I_DATA_TYPE[data_type]))
except LookupError:
tag_list.append((tags[index], None, None))
else:
tag_list.append((tags[index] + ('GOOD',)))
else:
if typ == "READ":
tag_list.append((tags[index], None, None))
else:
tag_list.append((tags[index] + ('BAD',)))
return tag_list
class Socket:
def __init__(self, timeout=5.0):
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.sock.settimeout(timeout)
self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
def connect(self, host, port):
try:
self.sock.connect((host, port))
except socket.timeout:
raise CommError("Socket timeout during connection.")
def send(self, msg, timeout=0):
if timeout != 0:
self.sock.settimeout(timeout)
total_sent = 0
while total_sent < len(msg):
try:
sent = self.sock.send(msg[total_sent:])
if sent == 0:
raise CommError("socket connection broken.")
total_sent += sent
except socket.error:
raise CommError("socket connection broken.")
return total_sent
def receive(self, timeout=0):
if timeout != 0:
self.sock.settimeout(timeout)
msg_len = 28
chunks = []
bytes_recd = 0
one_shot = True
while bytes_recd < msg_len:
try:
chunk = self.sock.recv(min(msg_len - bytes_recd, 2048))
if chunk == '':
raise CommError("socket connection broken.")
if one_shot:
data_size = int(struct.unpack('<H', chunk[2:4])[0]) # Length
msg_len = HEADER_SIZE + data_size
one_shot = False
chunks.append(chunk)
bytes_recd += len(chunk)
except socket.error as e:
raise CommError(e)
return ''.join(chunks)
def close(self):
self.sock.close()
def parse_symbol_type(symbol):
""" parse_symbol_type
It parses the symbol per the Rockwell spec.
:param symbol: the symbol associated with a tag
:return: A tuple containing information about the tag
"""
pass
return None
class Base(object):
_sequence = 0
def __init__(self, logging):
if Base._sequence == 0:
Base._sequence = getpid()
else:
Base._sequence = Base._get_sequence()
self.logger = logging
self.__version__ = '0.1'
self.__sock = None
self._session = 0
self._connection_opened = False
self._reply = None
self._message = None
self._target_cid = None
self._target_is_connected = False
self._tag_list = []
self._buffer = {}
self._device_description = "Device Unknown"
self._last_instance = 0
self._byte_offset = 0
self._last_position = 0
self._more_packets_available = False
self._last_tag_read = ()
self._last_tag_write = ()
self._status = (0, "")
# self.attribs = {'context': '_pycomm_', 'protocol version': 1, 'rpi': 5000, 'port': 0xAF12, 'timeout': 10,
# 'backplane': 1, 'cpu slot': 0, 'option': 0, 'cid': '\x27\x04\x19\x71', 'csn': '\x27\x04',
# 'vid': '\x09\x10', 'vsn': '\x09\x10\x19\x71', 'name': 'Base', 'ip address': None}
self.attribs = {'context': '_pycomm_', 'protocol version': 1, 'rpi': 5000, 'port': 0xAF12, 'timeout': 10,
'backplane': 0, 'cpu slot': 0, 'option': 0, 'cid': '\x27\x04\x19\x71', 'csn': '\x27\x04',
'vid': '\x09\x10', 'vsn': '\x09\x10\x19\x71', 'name': 'Base', 'ip address': None}
def __len__(self):
return len(self.attribs)
def __getitem__(self, key):
return self.attribs[key]
def __setitem__(self, key, value):
self.attribs[key] = value
def __delitem__(self, key):
try:
del self.attribs[key]
except LookupError:
pass
def __iter__(self):
return iter(self.attribs)
def __contains__(self, item):
return item in self.attribs
def _check_reply(self):
raise NotImplementedError("The method has not been implemented")
@staticmethod
def _get_sequence():
""" Increase and return the sequence used with connected messages
:return: The New sequence
"""
if Base._sequence < 65535:
Base._sequence += 1
else:
Base._sequence = getpid()
return Base._sequence
def nop(self):
""" No replay command
A NOP provides a way for either an originator or target to determine if the TCP connection is still open.
"""
self._message = self.build_header(ENCAPSULATION_COMMAND['nop'], 0)
self._send()
def __repr__(self):
return self._device_description
def description(self):
return self._device_description
def list_identity(self):
""" ListIdentity command to locate and identify potential target
return true if the replay contains the device description
"""
self._message = self.build_header(ENCAPSULATION_COMMAND['list_identity'], 0)
self._send()
self._receive()
if self._check_reply():
try:
self._device_description = self._reply[63:-1]
return True
except Exception as e:
raise CommError(e)
return False
def send_rr_data(self, msg):
""" SendRRData transfer an encapsulated request/reply packet between the originator and target
:param msg: The message to be send to the target
:return: the replay received from the target
"""
self._message = self.build_header(ENCAPSULATION_COMMAND["send_rr_data"], len(msg))
self._message += msg
self._send()
self._receive()
return self._check_reply()
def send_unit_data(self, msg):
""" SendUnitData send encapsulated connected messages.
:param msg: The message to be send to the target
:return: the replay received from the target
"""
self._message = self.build_header(ENCAPSULATION_COMMAND["send_unit_data"], len(msg))
self._message += msg
self._send()
self._receive()
return self._check_reply()
def get_status(self):
""" Get the last status/error
This method can be used after any call to get any details in case of error
:return: A tuple containing (error group, error message)
"""
return self._status
def clear(self):
""" Clear the last status/error
:return: an empty tuple
"""
self._status = (0, "")
def build_header(self, command, length):
""" Build the encapsulate message header
The header is 24 bytes fixed length, and includes the command and the length of the optional data portion.
:return: the header
"""
try:
h = command # Command UINT
h += pack_uint(length) # Length UINT
h += pack_dint(self._session) # Session Handle UDINT
h += pack_dint(0) # Status UDINT
h += self.attribs['context'] # Sender Context 8 bytes
h += pack_dint(self.attribs['option']) # Option UDINT
return h
except Exception as e:
raise CommError(e)
def register_session(self):
""" Register a new session with the communication partner
:return: None if any error, otherwise return the session number
"""
if self._session:
return self._session
self._session = 0
self._message = self.build_header(ENCAPSULATION_COMMAND['register_session'], 4)
self._message += pack_uint(self.attribs['protocol version'])
self._message += pack_uint(0)
self._send()
self._receive()
if self._check_reply():
self._session = unpack_dint(self._reply[4:8])
self.logger.debug("Session ={0} has been registered.".format(print_bytes_line(self._reply[4:8])))
return self._session
self._status = 'Warning! The session has not been registered.'
self.logger.warning(self._status)
return None
def forward_open(self):
""" CIP implementation of the forward open message
Refer to ODVA documentation Volume 1 3-5.5.2
:return: False if any error in the reply
"""
if self._session == 0:
self._status = (4, "A session need to be registered before to call forward_open.")
raise CommError("A session need to be registered before to call forward open")
forward_open_msg = [
FORWARD_OPEN,
pack_usint(2),
CLASS_ID["8-bit"],
CLASS_CODE["Connection Manager"], # Volume 1: 5-1
INSTANCE_ID["8-bit"],
CONNECTION_MANAGER_INSTANCE['Open Request'],
PRIORITY,
TIMEOUT_TICKS,
pack_dint(0),
self.attribs['cid'],
self.attribs['csn'],
self.attribs['vid'],
self.attribs['vsn'],
TIMEOUT_MULTIPLIER,
'\x00\x00\x00',
pack_dint(self.attribs['rpi'] * 1000),
pack_uint(CONNECTION_PARAMETER['Default']),
pack_dint(self.attribs['rpi'] * 1000),
pack_uint(CONNECTION_PARAMETER['Default']),
TRANSPORT_CLASS, # Transport Class
# CONNECTION_SIZE['Backplane'],
CONNECTION_SIZE['Direct Network'],
# pack_usint(self.attribs['backplane']),
# pack_usint(self.attribs['cpu slot']),
CLASS_ID["8-bit"],
CLASS_CODE["Message Router"],
INSTANCE_ID["8-bit"],
pack_usint(1)
]
if self.send_rr_data(
build_common_packet_format(DATA_ITEM['Unconnected'], ''.join(forward_open_msg), ADDRESS_ITEM['UCMM'],)):
self._target_cid = self._reply[44:48]
self._target_is_connected = True
return True
self._status = (4, "forward_open returned False")
return False
def forward_close(self):
""" CIP implementation of the forward close message
Each connection opened with the forward open message needs to be closed.
Refer to ODVA documentation Volume 1 3-5.5.3
:return: False if any error in the reply
"""
if self._session == 0:
self._status = (5, "A session need to be registered before to call forward_close.")
raise CommError("A session need to be registered before to call forward_close.")
# print ("Backplane:{0}\nCPU:{1}".format(self.attribs['backplane'], self.attribs['cpu slot']))
forward_close_msg = [
FORWARD_CLOSE,
pack_usint(2),
CLASS_ID["8-bit"],
CLASS_CODE["Connection Manager"], # Volume 1: 5-1
INSTANCE_ID["8-bit"],
CONNECTION_MANAGER_INSTANCE['Open Request'],
PRIORITY,
TIMEOUT_TICKS,
self.attribs['csn'],
self.attribs['vid'],
self.attribs['vsn'],
CONNECTION_SIZE['Direct Network'],
# CONNECTION_SIZE['Backplane'],
'\x00', # Reserved
# pack_usint(self.attribs['backplane']),
# pack_usint(self.attribs['cpu slot']),
CLASS_ID["8-bit"],
CLASS_CODE["Message Router"],
INSTANCE_ID["8-bit"],
pack_usint(1)
]
if self.send_rr_data(
build_common_packet_format(DATA_ITEM['Unconnected'], ''.join(forward_close_msg), ADDRESS_ITEM['UCMM'])):
self._target_is_connected = False
return True
self._status = (5, "forward_close returned False")
self.logger.warning(self._status)
return False
def un_register_session(self):
""" Un-register a connection
"""
self._message = self.build_header(ENCAPSULATION_COMMAND['unregister_session'], 0)
self._send()
self._session = None
def _send(self):
"""
socket send
:return: true if no error otherwise false
"""
try:
self.logger.debug(print_bytes_msg(self._message, '-------------- SEND --------------'))
self.__sock.send(self._message)
except Exception as e:
#self.clean_up()
raise CommError(e)
def _receive(self):
"""
socket receive
:return: true if no error otherwise false
"""
try:
self._reply = self.__sock.receive()
self.logger.debug(print_bytes_msg(self._reply, '----------- RECEIVE -----------'))
except Exception as e:
#self.clean_up()
raise CommError(e)
def open(self, ip_address):
"""
socket open
:return: true if no error otherwise false
"""
# handle the socket layer
if not self._connection_opened:
try:
if self.__sock is None:
self.__sock = Socket()
self.__sock.connect(ip_address, self.attribs['port'])
self._connection_opened = True
self.attribs['ip address'] = ip_address
if self.register_session() is None:
self._status = (13, "Session not registered")
return False
self.forward_close()
return True
except Exception as e:
#self.clean_up()
raise CommError(e)
def close(self):
"""
socket close
:return: true if no error otherwise false
"""
try:
if self._target_is_connected:
self.forward_close()
if self._session != 0:
self.un_register_session()
if self.__sock:
self.__sock.close()
except Exception as e:
raise CommError(e)
self.clean_up()
def clean_up(self):
self.__sock = None
self._target_is_connected = False
self._session = 0
self._connection_opened = False
def is_connected(self):
return self._connection_opened
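# --- Editor's hedged usage sketch, not part of the upstream pycomm source ---
# The concrete ClxDriver built on Base above is driven by daq/tag/tag.py in
# roughly this way; a ControlLogix answering at the given address is assumed.
if __name__ == '__main__':
    from pycomm.ab_comm.clx import Driver as ClxDriver
    plc = ClxDriver()
    if plc.open('192.168.1.10'):
        try:
            print(plc.read_tag('Pump_Intake_Pressure'))  # (value, data type) on success
        finally:
            plc.close()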

View File

@@ -0,0 +1,482 @@
# -*- coding: utf-8 -*-
#
# cip_const.py - A set of structures and constants used to implement the Ethernet/IP protocol
#
#
# Copyright (c) 2014 Agostino Ruscito <ruscito@gmail.com>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
ELEMENT_ID = {
"8-bit": '\x28',
"16-bit": '\x29',
"32-bit": '\x2a'
}
CLASS_ID = {
"8-bit": '\x20',
"16-bit": '\x21',
}
INSTANCE_ID = {
"8-bit": '\x24',
"16-bit": '\x25'
}
ATTRIBUTE_ID = {
"8-bit": '\x30',
"16-bit": '\x31'
}
# Paths are combined as:
# CLASS_ID + PATHS
# For example PCCC path is CLASS_ID["8-bit"]+PATH["PCCC"] -> 0x20, 0x67, 0x24, 0x01.
PATH = {
'Connection Manager': '\x06\x24\x01',
'Router': '\x02\x24\x01',
'Backplane Data Type': '\x66\x24\x01',
'PCCC': '\x67\x24\x01',
'DHCP Channel A': '\xa6\x24\x01\x01\x2c\x01',
'DHCP Channel B': '\xa6\x24\x01\x02\x2c\x01'
}
ENCAPSULATION_COMMAND = { # Volume 2: 2-3.2 Command Field UINT 2 byte
"nop": '\x00\x00',
"list_targets": '\x01\x00',
"list_services": '\x04\x00',
"list_identity": '\x63\x00',
"list_interfaces": '\x64\x00',
"register_session": '\x65\x00',
"unregister_session": '\x66\x00',
"send_rr_data": '\x6F\x00',
"send_unit_data": '\x70\x00'
}
"""
When a tag is created, an instance of the Symbol Object (Class ID 0x6B) is created
inside the controller.
When a UDT is created, an instance of the Template object (Class ID 0x6C) is
created to hold information about the structure makeup.
"""
CLASS_CODE = {
"Message Router": '\x02', # Volume 1: 5-1
"Symbol Object": '\x6b',
"Template Object": '\x6c',
"Connection Manager": '\x06' # Volume 1: 3-5
}
CONNECTION_MANAGER_INSTANCE = {
'Open Request': '\x01',
'Open Format Rejected': '\x02',
'Open Resource Rejected': '\x03',
'Open Other Rejected': '\x04',
'Close Request': '\x05',
'Close Format Request': '\x06',
'Close Other Request': '\x07',
'Connection Timeout': '\x08'
}
TAG_SERVICES_REQUEST = {
"Read Tag": 0x4c,
"Read Tag Fragmented": 0x52,
"Write Tag": 0x4d,
"Write Tag Fragmented": 0x53,
"Read Modify Write Tag": 0x4e,
"Multiple Service Packet": 0x0a,
"Get Instance Attributes List": 0x55,
"Get Attributes": 0x03,
"Read Template": 0x4c,
}
TAG_SERVICES_REPLY = {
0xcc: "Read Tag",
0xd2: "Read Tag Fragmented",
0xcd: "Write Tag",
0xd3: "Write Tag Fragmented",
0xce: "Read Modify Write Tag",
0x8a: "Multiple Service Packet",
0xd5: "Get Instance Attributes List",
0x83: "Get Attributes",
0xcc: "Read Template"
}
I_TAG_SERVICES_REPLY = {
"Read Tag": 0xcc,
"Read Tag Fragmented": 0xd2,
"Write Tag": 0xcd,
"Write Tag Fragmented": 0xd3,
"Read Modify Write Tag": 0xce,
"Multiple Service Packet": 0x8a,
"Get Instance Attributes List": 0xd5,
"Get Attributes": 0x83,
"Read Template": 0xcc
}
"""
EtherNet/IP Encapsulation Error Codes
Standard CIP Encapsulation Error returned in the cip message header
"""
STATUS = {
0x0000: "Success",
0x0001: "The sender issued an invalid or unsupported encapsulation command",
0x0002: "Insufficient memory",
0x0003: "Poorly formed or incorrect data in the data portion",
0x0064: "An originator used an invalid session handle when sending an encapsulation message to the target",
0x0065: "The target received a message of invalid length",
0x0069: "Unsupported Protocol Version"
}
"""
MSG Error Codes:
The following error codes have been taken from:
Rockwell Automation Publication
1756-RM003P-EN-P - December 2014
"""
SERVICE_STATUS = {
0x01: "Connection failure (see extended status)",
0x02: "Insufficient resource",
0x03: "Invalid value",
0x04: "IOI syntax error. A syntax error was detected decoding the Request Path (see extended status)",
0x05: "Destination unknown, class unsupported, instance \nundefined or structure element undefined (see extended status)",
0x06: "Insufficient Packet Space",
0x07: "Connection lost",
0x08: "Service not supported",
0x09: "Error in data segment or invalid attribute value",
0x0A: "Attribute list error",
0x0B: "State already exist",
0x0C: "Object state conflict",
0x0D: "Object already exist",
0x0E: "Attribute not settable",
0x0F: "Permission denied",
0x10: "Device state conflict",
0x11: "Reply data too large",
0x12: "Fragmentation of a primitive value",
0x13: "Insufficient command data",
0x14: "Attribute not supported",
0x15: "Too much data",
0x1A: "Bridge request too large",
0x1B: "Bridge response too large",
0x1C: "Attribute list shortage",
0x1D: "Invalid attribute list",
0x1E: "Request service error",
0x1F: "Connection related failure (see extended status)",
0x22: "Invalid reply received",
0x25: "Key segment error",
0x26: "Invalid IOI error",
0x27: "Unexpected attribute in list",
0x28: "DeviceNet error - invalid member ID",
0x29: "DeviceNet error - member not settable",
0xD1: "Module not in run state",
0xFB: "Message port not supported",
0xFC: "Message unsupported data type",
0xFD: "Message uninitialized",
0xFE: "Message timeout",
0xff: "General Error (see extended status)"
}
EXTEND_CODES = {
0x01: {
0x0100: "Connection in use",
0x0103: "Transport not supported",
0x0106: "Ownership conflict",
0x0107: "Connection not found",
0x0108: "Invalid connection type",
0x0109: "Invalid connection size",
0x0110: "Module not configured",
0x0111: "EPR not supported",
0x0114: "Wrong module",
0x0115: "Wrong device type",
0x0116: "Wrong revision",
0x0118: "Invalid configuration format",
0x011A: "Application out of connections",
0x0203: "Connection timeout",
0x0204: "Unconnected message timeout",
0x0205: "Unconnected send parameter error",
0x0206: "Message too large",
0x0301: "No buffer memory",
0x0302: "Bandwidth not available",
0x0303: "No screeners available",
0x0305: "Signature match",
0x0311: "Port not available",
0x0312: "Link address not available",
0x0315: "Invalid segment type",
0x0317: "Connection not scheduled"
},
0x04: {
0x0000: "Extended status out of memory",
0x0001: "Extended status out of instances"
},
0x05: {
0x0000: "Extended status out of memory",
0x0001: "Extended status out of instances"
},
0x1F: {
0x0203: "Connection timeout"
},
0xff: {
0x7: "Wrong data type",
0x2001: "Excessive IOI",
0x2002: "Bad parameter value",
0x2018: "Semaphore reject",
0x201B: "Size too small",
0x201C: "Invalid size",
0x2100: "Privilege failure",
0x2101: "Invalid keyswitch position",
0x2102: "Password invalid",
0x2103: "No password issued",
0x2104: "Address out of range",
0x2105: "Address and how many out of range",
0x2106: "Data in use",
0x2107: "Type is invalid or not supported",
0x2108: "Controller in upload or download mode",
0x2109: "Attempt to change number of array dimensions",
0x210A: "Invalid symbol name",
0x210B: "Symbol does not exist",
0x210E: "Search failed",
0x210F: "Task cannot start",
0x2110: "Unable to write",
0x2111: "Unable to read",
0x2112: "Shared routine not editable",
0x2113: "Controller in faulted mode",
0x2114: "Run mode inhibited"
}
}
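# --- Editor's illustrative sketch, not part of the upstream constants file ---
# get_extended_status() in the driver resolves a (general, extended) status pair
# through the two tables above, e.g. 0x01 / 0x0203 maps to
# "Connection failure (see extended status)" / "Connection timeout".
def _example_status_text(general_status, extended_status):
    general = SERVICE_STATUS.get(general_status, "Unknown")
    extended = EXTEND_CODES.get(general_status, {}).get(
        extended_status, "Extended Status info not present")
    return general, extended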
DATA_ITEM = {
'Connected': '\xb1\x00',
'Unconnected': '\xb2\x00'
}
ADDRESS_ITEM = {
'Connection Based': '\xa1\x00',
'Null': '\x00\x00',
'UCMM': '\x00\x00'
}
UCMM = {
'Interface Handle': 0,
'Item Count': 2,
'Address Type ID': 0,
'Address Length': 0,
'Data Type ID': 0x00b2
}
CONNECTION_SIZE = {
'Backplane': '\x03', # CLX
'Direct Network': '\x02'
}
HEADER_SIZE = 24
EXTENDED_SYMBOL = '\x91'
BOOL_ONE = 0xff
REQUEST_SERVICE = 0
REQUEST_PATH_SIZE = 1
REQUEST_PATH = 2
SUCCESS = 0
INSUFFICIENT_PACKETS = 6
OFFSET_MESSAGE_REQUEST = 40
FORWARD_CLOSE = '\x4e'
UNCONNECTED_SEND = '\x52'
FORWARD_OPEN = '\x54'
LARGE_FORWARD_OPEN = '\x5b'
GET_CONNECTION_DATA = '\x56'
SEARCH_CONNECTION_DATA = '\x57'
GET_CONNECTION_OWNER = '\x5a'
MR_SERVICE_SIZE = 2
PADDING_BYTE = '\x00'
PRIORITY = '\x0a'
TIMEOUT_TICKS = '\x05'
TIMEOUT_MULTIPLIER = '\x01'
TRANSPORT_CLASS = '\xa3'
CONNECTION_PARAMETER = {
'PLC5': 0x4302,
'SLC500': 0x4302,
'CNET': 0x4320,
'DHP': 0x4302,
'Default': 0x43f8,
}
"""
Atomic Data Type:
Bit = Bool
Bit array = DWORD (32-bit boolean array)
8-bit integer = SINT
16-bit integer = INT
32-bit integer = DINT
32-bit float = REAL
64-bit integer = LINT
From Rockwell Automation Publication 1756-PM020C-EN-P November 2012:
When reading a BOOL tag, the values returned for 0 and 1 are 0 and 0xff, respectively.
"""
S_DATA_TYPE = {
'BOOL': 0xc1,
'SINT': 0xc2, # Signed 8-bit integer
'INT': 0xc3, # Signed 16-bit integer
'DINT': 0xc4, # Signed 32-bit integer
'LINT': 0xc5, # Signed 64-bit integer
'USINT': 0xc6, # Unsigned 8-bit integer
'UINT': 0xc7, # Unsigned 16-bit integer
'UDINT': 0xc8, # Unsigned 32-bit integer
'ULINT': 0xc9, # Unsigned 64-bit integer
'REAL': 0xca, # 32-bit floating point
'LREAL': 0xcb, # 64-bit floating point
'STIME': 0xcc, # Synchronous time
'DATE': 0xcd,
'TIME_OF_DAY': 0xce,
'DATE_AND_TIME': 0xcf,
'STRING': 0xd0, # character string (1 byte per character)
'BYTE': 0xd1, # byte string 8-bits
'WORD': 0xd2, # byte string 16-bits
'DWORD': 0xd3, # byte string 32-bits
'LWORD': 0xd4, # byte string 64-bits
'STRING2': 0xd5, # character string (2 byte per character)
'FTIME': 0xd6, # Duration high resolution
'LTIME': 0xd7, # Duration long
'ITIME': 0xd8, # Duration short
'STRINGN': 0xd9, # character string (n byte per character)
'SHORT_STRING': 0xda, # character string (1 byte per character, 1 byte length indicator)
'TIME': 0xdb, # Duration in milliseconds
'EPATH': 0xdc, # CIP Path segment
'ENGUNIT': 0xdd, # Engineering Units
'STRINGI': 0xde # International character string
}
I_DATA_TYPE = {
0xc1: 'BOOL',
0xc2: 'SINT', # Signed 8-bit integer
0xc3: 'INT', # Signed 16-bit integer
0xc4: 'DINT', # Signed 32-bit integer
0xc5: 'LINT', # Signed 64-bit integer
0xc6: 'USINT', # Unsigned 8-bit integer
0xc7: 'UINT', # Unsigned 16-bit integer
0xc8: 'UDINT', # Unsigned 32-bit integer
0xc9: 'ULINT', # Unsigned 64-bit integer
0xca: 'REAL', # 32-bit floating point
0xcb: 'LREAL', # 64-bit floating point
0xcc: 'STIME', # Synchronous time
0xcd: 'DATE',
0xce: 'TIME_OF_DAY',
0xcf: 'DATE_AND_TIME',
0xd0: 'STRING', # character string (1 byte per character)
0xd1: 'BYTE', # byte string 8-bits
0xd2: 'WORD', # byte string 16-bits
0xd3: 'DWORD', # byte string 32-bits
0xd4: 'LWORD', # byte string 64-bits
0xd5: 'STRING2', # character string (2 byte per character)
0xd6: 'FTIME', # Duration high resolution
0xd7: 'LTIME', # Duration long
0xd8: 'ITIME', # Duration short
0xd9: 'STRINGN', # character string (n byte per character)
0xda: 'SHORT_STRING', # character string (1 byte per character, 1 byte length indicator)
0xdb: 'TIME', # Duration in milliseconds
0xdc: 'EPATH', # CIP Path segment
0xdd: 'ENGUNIT', # Engineering Units
0xde: 'STRINGI' # International character string
}
REPLAY_INFO = {
0x4e: 'FORWARD_CLOSE (4E,00)',
0x52: 'UNCONNECTED_SEND (52,00)',
0x54: 'FORWARD_OPEN (54,00)',
0x6f: 'send_rr_data (6F,00)',
0x70: 'send_unit_data (70,00)',
0x00: 'nop',
0x01: 'list_targets',
0x04: 'list_services',
0x63: 'list_identity',
0x64: 'list_interfaces',
0x65: 'register_session',
0x66: 'unregister_session',
}
PCCC_DATA_TYPE = {
'N': '\x89',
'B': '\x85',
'T': '\x86',
'C': '\x87',
'S': '\x84',
'F': '\x8a',
'ST': '\x8d',
'A': '\x8e',
'R': '\x88',
'O': '\x8b',
'I': '\x8c'
}
PCCC_DATA_SIZE = {
'N': 2,
'B': 2,
'T': 6,
'C': 6,
'S': 2,
'F': 4,
'ST': 84,
'A': 2,
'R': 6,
'O': 2,
'I': 2
}
PCCC_CT = {
'PRE': 1,
'ACC': 2,
'EN': 15,
'TT': 14,
'DN': 13,
'CU': 15,
'CD': 14,
'OV': 12,
'UN': 11,
'UA': 10
}
PCCC_ERROR_CODE = {
-2: "Not Acknowledged (NAK)",
-3: "No Reponse, Check COM Settings",
-4: "Unknown Message from DataLink Layer",
-5: "Invalid Address",
-6: "Could Not Open Com Port",
-7: "No data specified to data link layer",
-8: "No data returned from PLC",
-20: "No Data Returned",
16: "Illegal Command or Format, Address may not exist or not enough elements in data file",
32: "PLC Has a Problem and Will Not Communicate",
48: "Remote Node Host is Missing, Disconnected, or Shut Down",
64: "Host Could Not Complete Function Due To Hardware Fault",
80: "Addressing problem or Memory Protect Rungs",
96: "Function not allows due to command protection selection",
112: "Processor is in Program mode",
128: "Compatibility mode file missing or communication zone problem",
144: "Remote node cannot buffer command",
240: "Error code in EXT STS Byte"
}

View File

@@ -0,0 +1,32 @@
__author__ = 'Agostino Ruscito'
__version__ = "1.0.7"
__date__ = "08 03 2015"
import logging
logging.basicConfig(
filename="pycomm.log",
filemode='w',
level=logging.INFO,
format="%(name)-13s %(levelname)-10s %(asctime)s %(message)s",
# propagate=0,
)
LOGGER = logging.getLogger('pycomm')
class PycommError(Exception):
pass
def setup_logger(name, level, filename=None):
log = logging.getLogger('pycomm.'+name)
log.setLevel(level)
if filename:
fh = logging.FileHandler(filename, mode='w')
fh.setFormatter(logging.Formatter("%(levelname)-10s %(asctime)s %(message)s"))
log.addHandler(fh)
log.propagate = False
return log
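# --- Editor's hedged usage sketch, not part of the upstream pycomm source ---
# A driver module typically asks for a child of the package logger configured
# above, optionally with its own file, e.g.:
#
#   clx_logger = setup_logger('clx', logging.DEBUG, filename='clx_driver.log')
#   clx_logger.debug("driver logger ready")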

107
daq/tag/tag.py Normal file
View File

@@ -0,0 +1,107 @@
#! /usr/bin/python
# from datetime import datetime
import time
from pycomm.ab_comm.clx import Driver as ClxDriver
import tag.micro800.micro800 as u800
import requests
import json
# import traceback
# import pickle
web_address = "https://10.10.10.10:3000"
def readTag(addr, tag):
time.sleep(0.01)
c = ClxDriver()
if c.open(addr):
try:
v = c.read_tag(tag)
return v
except Exception:
print("ERROR RETRIEVING TAG: {} at {}".format(tag, addr))
err = c.get_status()
c.close()
print(err)
pass
c.close()
def writeTag(addr, tag, val):
time.sleep(0.01)
pv = readTag(addr, tag)
if pv:
c = ClxDriver()
if c.open(addr):
try:
v = c.write_tag(tag, val, pv[1])
return v
except Exception:
print("ERROR WRITING TAG: {} at {}".format(tag, addr))
err = c.get_status()
c.close()
print(err)
pass
c.close()
class Tag():
global readTag, writeTag, web_address
def __init__(self, name, tag, db_id, data_type, change_threshold, guarantee_sec, mapFn=None, device_type='CLX', ip_address='192.168.1.10'):
self.name = name
self.tag = tag
self.data_type = data_type
self.value = None
self.last_value = None
self.guarantee_sec = guarantee_sec
self.chg_threshold = change_threshold
self.last_send_time = 0
self.mapFn = mapFn
self.device_type = device_type
self.readFn = readTag
self.writeFn = writeTag
self.db_id = db_id
if self.device_type == "u800" or self.device_type == "Micro800":
self.readFn = u800.readMicroTag
self.writeFn = u800.writeMicroTag
self.ip_address = ip_address
def read(self, forceSend):
writeToDB = False
if self.tag:
v = self.readFn(str(self.ip_address), str(self.tag))
if v:
val = v[0]
if self.data_type == 'BOOL' or self.data_type == 'STRING':
if self.mapFn:
val = self.mapFn[val]
if (self.last_send_time == 0) or (self.value is None) or not (self.value == val) or ((time.time() - self.last_send_time) > self.guarantee_sec) or (forceSend is True):
self.last_value = self.value
self.value = val
writeToDB = True
else:
writeToDB = False
else:
if (self.last_send_time == 0) or (self.value is None) or (abs(self.value - v[0]) > self.chg_threshold) or ((time.time() - self.last_send_time) > self.guarantee_sec) or (forceSend is True):
self.last_value = self.value
self.value = v[0]
writeToDB = True
else:
writeToDB = False
if forceSend is False:
writeToDB = False
if writeToDB:
self.sendToDB()
return self.value
def write(self, value):
if self.tag:
w = self.writeFn(str(self.ip_address), str(self.tag), value)
def sendToDB(self):
data = {}
data['val'] = self.value
data['tagID'] = self.db_id
r = requests.post('{}/tag_val'.format(web_address), data=data, verify=False)
resp = json.loads(r.text)
print("Stored {} for {} at {}".format(resp['val'], self.name, resp['createdAt']))
self.last_send_time = time.time()
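# --- Editor's hedged demo, not part of the original file ---
# Polling a single Tag outside taglogger's main loop. The name, database id and
# address below are placeholders lifted from the sample data elsewhere in this
# repository; a reachable PLC and the Sails API at web_address are assumed.
if __name__ == '__main__':
    demo = Tag('Century Counter Up', 'Century_Counter_Up', 1, 'REAL',
               10.0, 3600, ip_address='192.168.1.10')
    # read(True) forces the value to be POSTed to {web_address}/tag_val whenever
    # the PLC read succeeds, bypassing the change-threshold/guarantee_sec deadband.
    print(demo.read(True))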

View File

@@ -1,20 +1,20 @@
#!/usr/bin/env python
'''
MySQL Tag Server
Tag Logger
Created on April 7, 2016
@author: Patrick McDonagh
@description: Continuously loops through a list of tags to store values from a PLC into a MySQL database
@description: Continuously loops through a list of tags to store values from a PLC
'''
from tag.tag import Tag
import traceback
import time
import requests
import json
import requests
from tag.tag import Tag
# DEFAULTS
web_address = "https://localhost:3000"
web_address = "https://10.10.10.10:3000"
scan_rate = 30 # seconds
save_all = "test" # use True, False, or any string
plc_handshake_tags = {}
@@ -31,7 +31,7 @@ def main():
get_tag_request_data = {'where': '{"tag_class": 5}'}
get_tag_request = requests.get('{}/tag'.format(web_address), params=get_tag_request_data, verify=False)
tags = json.loads(get_tag_request.text)
except Exception, e:
except Exception as e:
print("Error getting tags: {}".format(e))
time.sleep(10)
main()
@@ -43,7 +43,7 @@ def main():
device_types_json = json.loads(get_device_type_request.text)
for t in device_types_json:
device_types[t['id']] = t['dType']
except Exception, e:
except Exception as e:
print("Error getting tags: {}".format(e))
time.sleep(10)
main()
@@ -54,7 +54,7 @@ def main():
sr_try = json.loads(sr_req.text)
if len(sr_try) > 0:
scan_rate = int(sr_try[0]['val'])
except Exception, e:
except Exception as e:
print("Error getting scan rate: {}".format(e))
print("I'll just use {} seconds as the scan rate...".format(scan_rate))
@@ -67,7 +67,7 @@ def main():
save_all = True
elif sa_try[0]['val'].lower() == "false":
save_all = False
except Exception, e:
except Exception as e:
print("Error getting save-all: {}".format(e))
print("I'll just use {} as the save-all parameter...".format(save_all))
@@ -79,7 +79,7 @@ def main():
if len(hs_tags) > 0:
for hs in hs_tags:
plc_handshake_tags[hs['name']] = Tag(hs['name'], hs['tag'], hs['id'], hs['data_type'], hs['change_threshold'], hs['guarantee_sec'], mapFn=hs['map_function'], ip_address=hs['deviceID']['address'], device_type=device_types[hs['deviceID']['device_type']])
except Exception, e:
except Exception as e:
print("Error getting handshake tags: {}".format(e))
for t in tags:

View File

@@ -1,101 +0,0 @@
CREATE DATABASE poconsole;
USE poconsole;
CREATE TABLE IF NOT EXISTS tag_classes(
id INT NOT NULL AUTO_INCREMENT,
tag_class varchar(64),
description varchar(64),
createdAt DATETIME,
updatedAt DATETIME,
PRIMARY KEY (id)
);
CREATE TABLE IF NOT EXISTS device_types(
id INT NOT NULL AUTO_INCREMENT,
dType VARCHAR(64),
createdAt DATETIME,
updatedAt DATETIME,
PRIMARY KEY (id)
);
CREATE TABLE IF NOT EXISTS devices(
id INT NOT NULL AUTO_INCREMENT,
name varchar(64),
device_type INT,
address VARCHAR(64),
createdAt DATETIME,
updatedAt DATETIME,
PRIMARY KEY (id),
INDEX device_type_ind (device_type),
FOREIGN KEY (device_type)
REFERENCES device_types(id)
);
CREATE TABLE IF NOT EXISTS tags(
id INT NOT NULL AUTO_INCREMENT,
name varchar(128),
class INT,
tag varchar(128),
deviceID INT,
description varchar(128),
data_type varchar(32),
change_threshold float,
guarantee_sec INT,
map_function varchar(64),
units varchar(64),
minExpected varchar(64),
maxExpected varchar(64),
createdAt DATETIME,
updatedAt DATETIME,
PRIMARY KEY (id),
INDEX class_ind (class),
FOREIGN KEY (class)
REFERENCES tag_classes(id)
ON DELETE CASCADE,
INDEX deviceID_ind (deviceID),
FOREIGN KEY (deviceID)
REFERENCES devices(id)
ON DELETE CASCADE
);
CREATE TABLE IF NOT EXISTS tag_vals(
id INT NOT NULL AUTO_INCREMENT,
tagID int,
val float,
createdAt DATETIME,
updatedAt DATETIME,
PRIMARY KEY (id),
INDEX tagID_ind (tagID),
FOREIGN KEY (tagID)
REFERENCES tags(id)
ON DELETE CASCADE
);
CREATE TABLE IF NOT EXISTS config (
id INT NOT NULL AUTO_INCREMENT,
parameter varchar(128),
val varchar(128),
createdAt DATETIME,
updatedAt DATETIME,
PRIMARY KEY (id)
);
INSERT INTO poconsole.tag_classes (id, tag_class, description) VALUES (1, 'stroke', 'Stroke Information');
INSERT INTO poconsole.tag_classes (id, tag_class, description) VALUES (2, 'history', 'Historical Data');
INSERT INTO poconsole.tag_classes (id, tag_class, description) VALUES (3, 'gaugeoff', 'Gauge Off Data');
INSERT INTO poconsole.tag_classes (id, tag_class, description) VALUES (4, 'welltest', 'Well Test Data');
INSERT INTO poconsole.tag_classes (id, tag_class, description) VALUES (5, 'custom', 'Custom tags');
INSERT INTO poconsole.device_types (id, dType) VALUES (1, "CLX");
INSERT INTO poconsole.device_types (id, dType) VALUES (2, "Micro800");
INSERT INTO poconsole.device_types (id, dType) VALUES (3, "E300");
-- INSERT INTO poconsole.device_types (id, dType) VALUES (4, "PF755");
CREATE USER 'website'@'localhost' IDENTIFIED BY 'henrypump';
GRANT ALL ON *.* TO 'website'@'localhost';
CREATE USER 'admin'@'localhost' IDENTIFIED BY 'henrypump';
GRANT ALL ON *.* to 'admin'@'localhost';
CREATE USER 'admin'@'%' IDENTIFIED BY 'henrypump';
GRANT ALL ON *.* to 'admin'@'%';
FLUSH PRIVILEGES;

View File

@@ -1,42 +0,0 @@
CREATE TABLE IF NOT EXISTS tag_classes(
id INTEGER PRIMARY KEY,
tag_class TEXT,
description TEXT
);
CREATE TABLE IF NOT EXISTS tags (
id INTEGER PRIMARY KEY,
name TEXT,
class TEXT,
tag TEXT,
description TEXT,
data_type TEXT,
change_threshold REAL,
guarantee_sec INTEGER,
map_function TEXT,
units TEXT,
minExpected REAL,
maxExpected REAL,
dateAdded TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
deleted INTEGER DEFAULT 0
);
CREATE TABLE IF NOT EXISTS tag_vals (
id INTEGER PRIMARY KEY,
tagID INTEGER,
val REAL,
dtime TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);
CREATE TABLE IF NOT EXISTS config (
id INTEGER PRIMARY KEY,
parameter TEXT,
val TEXT,
dateAdded TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);
INSERT INTO tag_classes (id, tag_class, description) VALUES (1, 'stroke', 'Stroke Information');
INSERT INTO tag_classes (id, tag_class, description) VALUES (2, 'history', 'Historical Data');
INSERT INTO tag_classes (id, tag_class, description) VALUES (3, 'gaugeoff', 'Gauge Off Data');
INSERT INTO tag_classes (id, tag_class, description) VALUES (4, 'welltest', 'Well Test Data');
INSERT INTO tag_classes (id, tag_class, description) VALUES (5, 'custom', 'Custom tags');

View File

@@ -1,37 +0,0 @@
#! /bin/sh
# /etc/init.d/tagserver
### BEGIN INIT INFO
# Provides: tagserver
# Required-Start: $remote_fs $syslog
# Required-Stop: $remote_fs $syslog
# Default-Start: 2 3 4 5
# Default-Stop: 0 1 6
# Short-Description: Simple script to start a program at boot
# Description: A simple script from www.stuffaboutcode.com which will start / stop a program a boot / shutdown.
### END INIT INFO
# If you want a command to always run, put it here
# Carry out specific functions when asked to by the system
case "$1" in
start)
echo "Starting loggers"
kill -9 $(cat /root/tagserver.pid)
# run application you want to start
/usr/bin/python /root/tag-server/tagserver.py > /dev/null 2>&1 & echo $! > "/root/tagserver.pid"
;;
stop)
echo "Stopping loggers"
# kill application you want to stop
kill -9 $(cat /root/tagserver.pid)
;;
*)
echo "Usage: /etc/init.d/tagserver {start|stop}"
exit 1
;;
esac
exit 0

View File

@@ -1,18 +0,0 @@
(dp0
S'host'
p1
S'127.0.0.1'
p2
sS'password'
p3
S'henrypump'
p4
sS'user'
p5
S'website'
p6
sS'database'
p7
S'poconsole'
p8
s.

View File

@@ -1,11 +0,0 @@
import pickle
mysql_cfg = {
'host': '127.0.0.1',
'user': 'website',
'password': 'henrypump',
'database': 'poconsole'
}
with open('mysql_cfg.pickle', 'wb') as pickleconfig:
pickle.dump(mysql_cfg, pickleconfig)

1
tag

Submodule tag deleted from 30913bfee3

View File

@@ -1,106 +0,0 @@
#!/usr/bin/env python
'''
MySQL Tag Server
Created on April 7, 2016
@author: Patrick McDonagh
@description: Continuously loops through a list of tags to store values from a PLC into a MySQL database
'''
from tag.tag import Tag
import traceback
import time
import requests
import json
# DEFAULTS
web_address = "https://localhost:3000"
scan_rate = 30 # seconds
save_all = "test" # use True, False, or any string
plc_handshake_tags = {}
last_handshake_time = 0
tag_store = {}
device_types = {}
def main():
global web_address, scan_rate, save_all, tag_store, device_types, plc_handshake_tags, last_handshake_time
try:
# Get tags stored in database
get_tag_request_data = {'where': '{"tag_class": 5}'}
get_tag_request = requests.get('{}/tag'.format(web_address), params=get_tag_request_data, verify=False)
tags = json.loads(get_tag_request.text)
except Exception, e:
print("Error getting tags: {}".format(e))
time.sleep(10)
main()
try:
# Get tags stored in database
get_device_type_request = requests.get('{}/device_type'.format(web_address), verify=False)
device_types_json = json.loads(get_device_type_request.text)
for t in device_types_json:
device_types[t['id']] = t['dType']
except Exception, e:
print("Error getting tags: {}".format(e))
time.sleep(10)
main()
try:
sr_req_data = 'where={"parameter": "scan_rate"}'
sr_req = requests.get('{}/config?{}'.format(web_address, sr_req_data), verify=False)
sr_try = json.loads(sr_req.text)
if len(sr_try) > 0:
scan_rate = int(sr_try[0]['val'])
except Exception, e:
print("Error getting scan rate: {}".format(e))
print("I'll just use {} seconds as the scan rate...".format(scan_rate))
try:
sa_req_data = {"where": {"parameter": "save_all"}}
sa_req = requests.get('{}/config'.format(web_address), params=sa_req_data, verify=False)
sa_try = json.loads(sa_req.text)
if len(sa_try) > 0:
if sa_try[0]['val'].lower() == "true":
save_all = True
elif sa_try[0]['val'].lower() == "false":
save_all = False
except Exception, e:
print("Error getting save-all: {}".format(e))
print("I'll just use {} as the save-all parameter...".format(save_all))
try:
# Get tags stored in database
get_hs_request_data = {'where': '{"tag_class": 6}'}
get_hs_request = requests.get('{}/tag'.format(web_address), params=get_hs_request_data, verify=False)
hs_tags = json.loads(get_hs_request.text)
if len(hs_tags) > 0:
for hs in hs_tags:
plc_handshake_tags[hs['name']] = Tag(hs['name'], hs['tag'], hs['id'], hs['data_type'], hs['change_threshold'], hs['guarantee_sec'], mapFn=hs['map_function'], ip_address=hs['deviceID']['address'], device_type=device_types[hs['deviceID']['device_type']])
except Exception, e:
print("Error getting handshake tags: {}".format(e))
for t in tags:
# name, tag, db_id, data_type, change_threshold, guarantee_sec, mapFn=None, device_type='CLX', ip_address='192.168.1.10'):
tag_store[t['name']] = Tag(t['name'], t['tag'], t['id'], t['data_type'], t['change_threshold'], t['guarantee_sec'], mapFn=t['map_function'], ip_address=t['deviceID']['address'], device_type=device_types[t['deviceID']['device_type']])
while True:
for tag in tag_store:
try:
tag_store[tag].read('test')
except:
print("ERROR EVALUATING {}".format(tag))
traceback.print_exc()
if plc_handshake_tags:
if time.time() - last_handshake_time > 30.0:
for hs_tag in plc_handshake_tags:
plc_handshake_tags[hs_tag].write(1)
print("Handshake with {} - {}".format(plc_handshake_tags[hs_tag].address, hs_tag))
last_handshake_time = time.time()
time.sleep(scan_rate)
if __name__ == '__main__':
main()

View File

@@ -1,97 +0,0 @@
#!/usr/bin/env python
'''
MySQL Tag Server
Created on April 7, 2016
@author: Patrick McDonagh
@description: Continuously loops through a list of tags to store values from a PLC into a MySQL database
'''
import mysql.connector as mysqlcon
import pickle
from tag.tag_mysql import Tag
import traceback
import time
import os
with open(os.path.realpath('.') + '/mysql_cfg.pickle', 'rb') as pickleconfig:
mysql_cfg = pickle.load(pickleconfig)
if mysql_cfg:
db = mysqlcon.connect(**mysql_cfg)
tag_store = {}
configProperties = {}
def main():
db.connect()
cur = db.cursor()
query = "SELECT * FROM tags WHERE class = 5 AND deleted = 0"
cur.execute(query)
tags = cur.fetchall()
print tags
# [(1, u'Century Counter Up', 5, u'Century_Counter_Up', u'REAL', 10.0, 3600, None, 0)]
db.disconnect()
configObj = {}
db.connect()
cur = db.cursor()
query = "SELECT parameter, val FROM config GROUP BY parameter;"
cur.execute(query)
config = cur.fetchall()
db.disconnect()
for x in config:
configObj[x[0]] = x[1]
try:
configProperties['PLC_IP_ADDRESS'] = str(configObj['ip_address'])
print("FYI, using PLC IP Address from the database {0}".format(configProperties['PLC_IP_ADDRESS']))
except KeyError:
print("FYI, there is no PLC IP Address stored in the database, defaulting to 192.168.1.10")
configProperties['PLC_IP_ADDRESS'] = "192.168.1.10"
try:
configProperties['plc_type'] = str(configObj['plc_type'])
print("FYI, using PLC Type from the database {0}".format(configProperties['plc_type']))
except KeyError:
print("FYI, there is no PLC Type stored in the database, defaulting to CLX")
configProperties['plc_type'] = "CLX"
try:
configProperties['scan_rate'] = int(configObj['scan_rate'])
print("FYI, using Scan Rate from the database {0}".format(configProperties['scan_rate']))
except KeyError:
print("FYI, there is no Scan Rate stored in the database, defaulting to 10 seconds")
configProperties['scan_rate'] = 10
try:
sa_test = str(configObj['save_all'])
if sa_test.lower() == "true":
configProperties['save_all'] = True
elif sa_test.lower() == "false":
configProperties['save_all'] = False
else:
configProperties['save_all'] = "test"
print("FYI, value for save_all is {0}".format(configProperties['save_all']))
except KeyError:
print("FYI, there is no save_all value stored in the database, using 'test'")
configProperties['save_all'] = 'test'
for t in tags:
tag_store[t[1]] = Tag(t[1], t[3], t[0], t[5], t[6], t[7], mapFn=t[8], device_type=configProperties['plc_type'], ip_address=configProperties['PLC_IP_ADDRESS'])
while True:
for tag in tag_store:
try:
tag_store[tag].read(configProperties['save_all'])
except:
print("ERROR EVALUATING {}".format(tag))
traceback.print_exc()
time.sleep(configProperties['scan_rate'])
if __name__ == '__main__':
main()

View File

@@ -1,96 +0,0 @@
#!/usr/bin/env python
'''
Created on Dec 8, 2015
@author: Patrick McDonagh
'''
import time
import sqlite3 as lite
from tag.tag_sqlite import Tag
import traceback
# con = lite.connect("/usr/db/data.db")
con = lite.connect('/mnt/usb/data.db')
configProperties = {}
def main():
with con:
cur = con.cursor()
query = "SELECT * FROM tags WHERE deleted = 0;"
cur.execute(query)
tags = cur.fetchall()
configObj = {}
with con:
cur = con.cursor()
query = "SELECT parameter, val FROM config GROUP BY parameter;"
cur.execute(query)
config = cur.fetchall()
for x in config:
configObj[x[0]] = x[1]
try:
configProperties['PLC_IP_ADDRESS'] = str(configObj['ip_address'])
print("FYI, using PLC IP Address from the database {0}".format(configProperties['PLC_IP_ADDRESS']))
except KeyError:
print("FYI, there is no PLC IP Address stored in the database, defaulting to 192.168.1.10")
configProperties['PLC_IP_ADDRESS'] = "192.168.1.10"
try:
configProperties['plc_type'] = str(configObj['plc_type'])
print("FYI, using PLC Type from the database {0}".format(configProperties['plc_type']))
except KeyError:
print("FYI, there is no PLC Type stored in the database, defaulting to CLX")
configProperties['plc_type'] = "CLX"
try:
configProperties['scan_rate'] = int(configObj['scan_rate'])
print("FYI, using Scan Rate from the database {0}".format(configProperties['scan_rate']))
except KeyError:
print("FYI, there is no Scan Rate stored in the database, defaulting to 10 seconds")
configProperties['scan_rate'] = 10
try:
sa_test = str(configObj['save_all'])
if sa_test.lower() == "true":
configProperties['save_all'] = True
elif sa_test.lower() == "false":
configProperties['save_all'] = False
else:
configProperties['save_all'] = "test"
print("FYI, value for save_all is {0}".format(configProperties['save_all']))
except KeyError:
print("FYI, there is no save_all value stored in the database, using 'test'")
configProperties['save_all'] = 'test'
tag_store = {}
if len(tags) > 0:
for t in tags:
# (1, u'Pump Intake Pressure', u'5', u'Pump_Intake_Pressure', u'Pressure at the Intake of the Pump', None, 100.0, 3600, u'PSI', 0.0, 3000.0, u'2016-04-13 21:27:01', 0)
tag_store[t[1]] = Tag(t[1], t[3], t[0], t[5], t[6], t[7], mapFn=t[8], device_type=configProperties['plc_type'], ip_address=configProperties['PLC_IP_ADDRESS'])
while True:
for tag in tag_store:
try:
tag_store[tag].read(configProperties['save_all'])
except:
print("ERROR EVALUATING {}".format(tag))
traceback.print_exc()
time.sleep(configProperties['scan_rate'])
if __name__ == '__main__':
main()

View File

@@ -1,22 +1,12 @@
FROM hypriot/rpi-node:latest
FROM node:latest
RUN apt-get -y update && apt-get install -y apt-utils dialog vim
RUN apt-get -y update && apt-get install -y apt-utils dialog vim git
COPY mysql-install.sh /tmp/mysql-install.sh
COPY taglogger_db_structure.sql /tmp/taglogger_db_structure.sql
RUN chmod +x /tmp/mysql-install.sh && /tmp/mysql-install.sh
RUN apt-get install -y python git-core wget
RUN wget https://bootstrap.pypa.io/get-pip.py
RUN python get-pip.py
RUN pip install requests
RUN git clone https://github.com/ruscito/pycomm.git
RUN cd pycomm && python setup.py install && cd ..
RUN mkdir /root/tag-logger
COPY init /root/tag-logger/init
COPY tag /root/tag-logger/tag
COPY www /root/tag-logger/www
COPY taglogger.py /root/tag-logger/taglogger.py
COPY startup.sh /root/startup.sh
RUN chmod +x /root/startup.sh
@@ -24,6 +14,9 @@ RUN chmod +x /root/startup.sh
RUN npm install -g bower pm2 sails --silent
RUN cd /root/tag-logger/www && npm install && bower install --allow-root && cd /
RUN apt-get clean
RUN rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
CMD '/root/startup.sh'

(Binary image assets not shown: the remaining file changes render only as before/after previews, and every preview pair reports an identical size, consistent with the images being moved as part of the reorganization rather than edited.)

Some files were not shown because too many files have changed in this diff.