diff --git a/daq/Dockerfile b/daq/Dockerfile
index 3e468d9..7a4e075 100644
--- a/daq/Dockerfile
+++ b/daq/Dockerfile
@@ -1,12 +1,12 @@
 FROM python:latest
 
-# Copy source files
-RUN mkdir /root/tag-logger
-COPY taglogger.py /root/tag-logger/taglogger.py
-
 # Install some python packages
 RUN pip install requests
 RUN pip install git+https://github.com/Henry-Pump/Pycomm-Helper.git
 RUN pip install git+https://github.com/ruscito/pycomm.git
 
-CMD ["python", "/root/tag-logger/taglogger.py"]
+# Copy source files
+RUN mkdir /root/tag-logger
+COPY taglogger.py /root/tag-logger/taglogger.py
+
+CMD ["python", "-u", "/root/tag-logger/taglogger.py"]
diff --git a/daq/Dockerfile.rpi b/daq/Dockerfile.rpi
index 2e1489e..9e3ecbf 100644
--- a/daq/Dockerfile.rpi
+++ b/daq/Dockerfile.rpi
@@ -2,7 +2,6 @@ FROM resin/rpi-raspbian:jessie
 
 # Copy source files
 RUN mkdir /root/tag-logger
-COPY taglogger.py /root/tag-logger/taglogger.py
 
 RUN apt-get update && apt-get install -y python python-pip git
 RUN apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
@@ -12,4 +11,6 @@ RUN pip install requests
 RUN pip install git+https://github.com/Henry-Pump/Pycomm-Helper.git
 RUN pip install git+https://github.com/ruscito/pycomm.git
 
-CMD ["python", "/root/tag-logger/taglogger.py"]
+COPY taglogger.py /root/tag-logger/taglogger.py
+
+CMD ["python", "-u", "/root/tag-logger/taglogger.py"]
diff --git a/daq/Dockerfile.ubuntu b/daq/Dockerfile.ubuntu
index 3e468d9..7a4e075 100644
--- a/daq/Dockerfile.ubuntu
+++ b/daq/Dockerfile.ubuntu
@@ -1,12 +1,12 @@
 FROM python:latest
 
-# Copy source files
-RUN mkdir /root/tag-logger
-COPY taglogger.py /root/tag-logger/taglogger.py
-
 # Install some python packages
 RUN pip install requests
 RUN pip install git+https://github.com/Henry-Pump/Pycomm-Helper.git
 RUN pip install git+https://github.com/ruscito/pycomm.git
 
-CMD ["python", "/root/tag-logger/taglogger.py"]
+# Copy source files
+RUN mkdir /root/tag-logger
+COPY taglogger.py /root/tag-logger/taglogger.py
+
+CMD ["python", "-u", "/root/tag-logger/taglogger.py"]
diff --git a/daq/taglogger.py b/daq/taglogger.py
index 84e1d99..839c13b 100644
--- a/daq/taglogger.py
+++ b/daq/taglogger.py
@@ -11,7 +11,6 @@
 import time
 import json
 import requests
 from requests.packages.urllib3.exceptions import InsecureRequestWarning
-# from pycomm_helper.tag import Tag
 from pycomm.ab_comm.clx import Driver as ClxDriver
 from pycomm.cip.cip_base import CommError
@@ -19,8 +18,8 @@
 requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
 
 # DEFAULTS
-db_address = "10.0.0.103"
-db_url = "https://{}".format(db_address)
+db_address = "web_db"
+db_url = "https://{}:5000".format(db_address)
 scan_rate = 30  # seconds
 save_all = "test"  # use True, False, or any string
 plc_handshake_tags = {}
@@ -32,7 +31,7 @@
 device_types = {}
 
 
-def readFromPLC(addr, tag):
+def read_from_plc(addr, tag):
     """Read a value from a PLC."""
     addr = str(addr)
     tag = str(tag)
@@ -50,6 +49,26 @@
     c.close()
 
 
+def write_to_plc(addr, tag, value):
+    """Write a value to a tag in the PLC at the specified address."""
+    pv = read_from_plc(addr, tag)
+    if pv:
+        c = ClxDriver()
+        if c.open(addr):
+            try:
+                v = c.write_tag(tag, value, pv[1])
+                c.close()
+                return True
+            except Exception:
+                print("ERROR WRITING TAG: {} at {}".format(tag, addr))
+                err = c.get_status()
+                c.close()
+                print(err)
+                return False
+        c.close()
+    return False
+
+
 def store_tag(tag):
     """Store the value of a tag in the web_db."""
     global db_url
@@ -103,7 +122,7 @@ def load_data():
     new_tags = [t['name'] for t in tags['objects']]
     existing_tags = [t['name'] for t in tag_list]
-    existing_handshakes = [t['name'] for h in handshake_list]
+    existing_handshakes = [h['name'] for h in handshake_list]
 
     tags_to_add = []
     handshakes_to_add = []
@@ -130,7 +149,7 @@ def load_data():
                     tags_to_copy.append(this_tag)
                 elif t in existing_handshakes:
                     for e_h in handshake_list:
-                        if e_h['name'] == h:
+                        if e_h['name'] == t:
                             this_tag['last_stored'] = e_h['last_stored']
                             this_tag['last_store_time'] = e_h['last_store_time']
                     handshakes_to_copy.append(this_tag)
@@ -150,11 +169,16 @@ def main():
     while True:
         load_data()
         # print(tag_list)
+        if len(tag_list + handshake_list) == 0:
+            print("No tags configured. Trying again in 10 seconds.")
+            time.sleep(10)
+            main()
+
         if len(tag_list) > 0:
             for i in range(0, len(tag_list)):
                 try:
-                    val = readFromPLC(tag_list[i]['ip_address'], tag_list[i]['tag'])[0]
+                    val = read_from_plc(tag_list[i]['ip_address'], tag_list[i]['tag'])[0]
                     now = time.time()
 
                     store_value = abs(val - tag_list[i]['last_stored']) > tag_list[i]['change_threshold']
@@ -163,9 +187,11 @@ def main():
                     if store_value or store_time or (save_all == "true"):
                         store_reason = ""
                         if store_time:
-                            store_reason = "time delta = {} > {}".format(now - tag_list[i]['last_store_time'], tag_list[i]['guarantee_sec'])
+                            store_reason = "time delta = {} > {}".format(now - tag_list[i]['last_store_time'],
+                                                                         tag_list[i]['guarantee_sec'])
                         elif store_value:
-                            store_reason = "value delta = {} > {}".format(abs(val - tag_list[i]['last_stored']), tag_list[i]['change_threshold'])
+                            store_reason = "value delta = {} > {}".format(abs(val - tag_list[i]['last_stored']),
+                                                                          tag_list[i]['change_threshold'])
                         elif save_all == "true":
                             store_reason = "save all parameter"
@@ -175,21 +201,25 @@ def main():
                         print("Stored {} for {} at {} due to {}".format(val, tag_list[i]['name'], now, store_reason))
                 except CommError:
-                    print("CommError: Error connecting to {} for {}".format(tag_list[i]['ip_address'], tag_list[i]['name']))
-                    traceback.print_exc()
+                    print("CommError: Error connecting to {} for {}".format(tag_list[i]['ip_address'],
+                                                                            tag_list[i]['name']))
                 except TypeError:
                     print("Error reading {}".format(tag_list[i]['name']))
-        else:
-            print("No tags in tag_list. Trying again in 10 seconds.")
-            time.sleep(10)
-            main()
-        # if plc_handshake_tags:
-        #     if time.time() - last_handshake_time > 30.0:
-        #         for hs_tag in plc_handshake_tags:
-        #             plc_handshake_tags[hs_tag].write(1)
-        #             print("Handshake with {} - {}".format(plc_handshake_tags[hs_tag].address, hs_tag))
-        #         last_handshake_time = time.time()
+        if len(handshake_list) > 0:
+            for h in range(0, len(handshake_list)):
+                now = time.time()
+                if (now - handshake_list[h]['last_store_time']) > handshake_list[h]['guarantee_sec']:
+                    try:
+                        write_to_plc(handshake_list[h]['ip_address'], handshake_list[h]['tag'], 1)
+                        handshake_list[h]['last_store_time'] = now
+                        print("Handshake with {} - {} at {}".format(handshake_list[h]['ip_address'],
+                                                                    handshake_list[h]['tag'], now))
+                    except CommError:
+                        print("CommError: Error connecting to {} for {}".format(handshake_list[h]['ip_address'],
+                                                                                handshake_list[h]['name']))
+                    except TypeError:
+                        print("Error writing {}".format(handshake_list[h]['name']))
         time.sleep(scan_rate)
diff --git a/daq_sample/Dockerfile.rpi b/daq_sample/Dockerfile.rpi
index b12f5c7..d9dda9d 100644
--- a/daq_sample/Dockerfile.rpi
+++ b/daq_sample/Dockerfile.rpi
@@ -1,14 +1,16 @@
-FROM patrickjmcd/rpi-python3:latest
+FROM resin/rpi-raspbian:jessie
 
-# Copy source files
+# Copy source files
 RUN mkdir /root/tag-logger
-COPY sampleData.py /root/tag-logger/sampleData.py
-COPY pycomm-master /tmp/pycomm
-COPY pycomm_helper /tmp/pycomm_helper
+RUN apt-get update && apt-get install -y python python-pip git
+RUN apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
 
 # Install some python packages
+RUN pip install --upgrade pip
 RUN pip install requests
-RUN cd /tmp/pycomm && python setup.py install && cd /
-RUN cd /tmp/pycomm_helper && python setup.py install && cd /
+RUN pip install git+https://github.com/Henry-Pump/Pycomm-Helper.git
+RUN pip install git+https://github.com/ruscito/pycomm.git
 
-CMD ["python", "/root/tag-logger/sampleData.py"]
+COPY sampleData.py /root/tag-logger/taglogger.py
+
+CMD ["python", "-u", "/root/tag-logger/taglogger.py"]
diff --git a/daq_sample/Dockerfile.ubuntu b/daq_sample/Dockerfile.ubuntu
index c2ebea2..a946e3e 100644
--- a/daq_sample/Dockerfile.ubuntu
+++ b/daq_sample/Dockerfile.ubuntu
@@ -1,14 +1,12 @@
 FROM python:latest
 
-# Copy source files
-RUN mkdir /root/tag-logger
-COPY sampleData.py /root/tag-logger/sampleData.py
-COPY pycomm-master /tmp/pycomm
-COPY pycomm_helper /tmp/pycomm_helper
-
 # Install some python packages
 RUN pip install requests
-RUN cd /tmp/pycomm && python setup.py install && cd /
-RUN cd /tmp/pycomm_helper && python setup.py install && cd /
+RUN pip install git+https://github.com/Henry-Pump/Pycomm-Helper.git
+RUN pip install git+https://github.com/ruscito/pycomm.git
 
-CMD ["python", "/root/tag-logger/sampleData.py"]
+# Copy source files
+RUN mkdir /root/tag-logger
+COPY sampleData.py /root/tag-logger/taglogger.py
+
+CMD ["python", "-u", "/root/tag-logger/taglogger.py"]
diff --git a/daq_sample/sampleData.py b/daq_sample/sampleData.py
index 51fe2ab..19e1062 100644
--- a/daq_sample/sampleData.py
+++ b/daq_sample/sampleData.py
@@ -1,107 +1,211 @@
-#!/usr/bin/env python
+"""
+Tag Logger.
-'''
-Sample Tag generator
 Created on April 7, 2016
 @author: Patrick McDonagh
-@description: Continuously loops through a list of tags to store values from a PLC into a MySQL database
-'''
+@description: Continuously loops through a list of tags to store values from a PLC
+"""
 
-from pycomm_helper.tag import Tag
 import traceback
 import time
-import random
-import requests
 import json
+import requests
+from requests.packages.urllib3.exceptions import InsecureRequestWarning
+# from pycomm_helper.tag import Tag
+from pycomm.ab_comm.clx import Driver as ClxDriver
+from pycomm.cip.cip_base import CommError
+from random import random, getrandbits
+
+
+requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
 
 # DEFAULTS
-db_address = "10.10.10.10:5000"
-db_url = "https://{}".format(db_address)
+db_address = 'web_db'
+db_url = "https://{}:5000".format(db_address)
 scan_rate = 30  # seconds
 save_all = "test"  # use True, False, or any string
-
-
-class Sample(Tag):
-    def read(self, forceSend):
-        writeToDB = False
-        if self.tag:
-            v = 0.0
-            if not (self.value is None):
-                v = [self.value + (10.0 * (random.random() - 0.5))]
-            else:
-                v = [random.random() * 100.0]
-            if v:
-                val = v[0]
-                if self.data_type == 'BOOL' or self.data_type == 'STRING':
-                    if self.mapFn:
-                        val = self.mapFn[val]
-                    if (self.last_send_time == 0) or (self.value is None) or not (self.value == val) or ((time.time() - self.last_send_time) > self.guarantee_sec) or (forceSend is True):
-                        self.last_value = self.value
-                        self.value = val
-                        writeToDB = True
-                    else:
-                        writeToDB = False
-                else:
-                    if (self.last_send_time == 0) or (self.value is None) or (abs(self.value - v[0]) > self.chg_threshold) or ((time.time() - self.last_send_time) > self.guarantee_sec) or (forceSend is True):
-                        self.last_value = self.value
-                        self.value = v[0]
-                        writeToDB = True
-                    else:
-                        writeToDB = False
-        if forceSend is False:
-            writeToDB = False
-        if writeToDB:
-            self.sendToDB()
-        return self.value
+plc_handshake_tags = {}
+last_handshake_time = 0
 
 tag_store = {}
+tag_list = []
+handshake_list = []
+device_types = {}
 
 
-def main():
-    global db_address, scan_rate, save_all
+def readFromPLC(addr, tag):
+    """Read a value from a PLC."""
+    addr = str(addr)
+    tag = str(tag)
+    c = ClxDriver()
+    if c.open(addr):
+        try:
+            v = c.read_tag(tag)
+            return v
+        except Exception:
+            print("ERROR RETRIEVING TAG: {} at {}".format(tag, addr))
+            err = c.get_status()
+            c.close()
+            print(err)
+            pass
+    c.close()
+
+
+def store_tag(tag):
+    """Store the value of a tag in the web_db."""
+    global db_url
+    url = "{}/api/tag_vals".format(db_url)
+    tag_val_obj = {
+        "tag_id": tag['id'],
+        "value": tag['last_stored']
+    }
+    headers = {"Content-Type": "application/json"}
+    r = requests.post(url, data=json.dumps(tag_val_obj), headers=headers, verify=False)
+    return r.status_code == 200
+
+
+def load_data():
+    """Load configuration data from the web server."""
+    global db_url, scan_rate, save_all, tag_list, handshake_list
     try:
         # Get tags stored in database
-        get_tag_request = requests.get('{}/api/tags'.format(db_url), verify=False)
-        tags = json.loads(get_tag_request.text)['objects']
+        url = '{}/api/tags'.format(db_url)
+        get_tag_request = requests.get(url, verify=False)
+        tags = json.loads(get_tag_request.text)
     except Exception as e:
         print("Error getting tags: {}".format(e))
         time.sleep(10)
         main()
     try:
-        sr_req_data = 'where={"parameter": "scan_rate"}'
-        sr_req = requests.get('{}/config?{}'.format(db_url, sr_req_data), verify=False)
-        sr_try = json.loads(sr_req.text)
-        if len(sr_try) > 0:
-            scan_rate = int(sr_try[0]['val'])
+        # Get device types stored in database
+        get_device_type_request = requests.get('{}/api/device_types'.format(db_url), verify=False)
+        device_types_json = json.loads(get_device_type_request.text)
+        for t in device_types_json['objects']:
+            device_types[t['id']] = t['device_type']
     except Exception as e:
-        print("Error getting scan rage: {}".format(e))
-        print("I'll just use {} seconds as the scan rate...".format(scan_rate))
+        print("Error getting tags: {}".format(e))
+        time.sleep(10)
+        main()
     try:
-        sa_req_data = {"where": {"parameter": "save_all"}}
-        sa_req = requests.get('{}/config'.format(db_url), params=sa_req_data, verify=False)
-        sa_try = json.loads(sa_req.text)
-        if len(sa_try) > 0:
-            if sa_try[0]['val'].lower() == "true":
-                save_all = True
-            elif sa_try[0]['val'].lower() == "false":
-                save_all = False
+        config_req = requests.get('{}/api/configs'.format(db_url), verify=False)
+        config_json = json.loads(config_req.text)
+        config_list = config_json['objects']
+
+        if len(config_list) > 0:
+            for c in config_list:
+                if c['parameter'] == "scan_rate":
+                    scan_rate = float(c['val'])
+                elif c['parameter'] == "save_all":
+                    save_all = c['val']
     except Exception as e:
-        print("Error getting save-all: {}".format(e))
-        print("I'll just use {} as the save-all parameter...".format(save_all))
-    for t in tags:
-        # name, tag, db_id, data_type, change_threshold, guarantee_sec, mapFn=None, device_type='CLX', ip_address='192.168.1.10'):
-        tag_store[t['name']] = Sample(t['name'], t['tag'], t['id'], t['data_type_id'], t['change_threshold'], t['guarantee_sec'], mapFn=t['map_function'], ip_address=t['device']['address'], db_address=db_address)
+        print("Error getting configs: {}".format(e))
+
+    new_tags = [t['name'] for t in tags['objects']]
+    existing_tags = [t['name'] for t in tag_list]
+    existing_handshakes = [h['name'] for h in handshake_list]
+
+    tags_to_add = []
+    handshakes_to_add = []
+    tags_to_copy = []
+    handshakes_to_copy = []
+    for t in new_tags:
+        this_tag = {}
+        for n_t in tags['objects']:
+            if n_t['name'] == t:
+                this_tag["tag"] = n_t['tag']
+                this_tag["id"] = n_t["id"]
+                this_tag["name"] = n_t['name']
+                this_tag["change_threshold"] = n_t['change_threshold']
+                this_tag["guarantee_sec"] = n_t['guarantee_sec']
+                this_tag["ip_address"] = n_t['device']['address']
+                this_tag["device_type"] = device_types[n_t['device']['device_type_id']]
+                this_tag["last_stored"] = 0.0
+                this_tag["last_store_time"] = 0
+                if t in existing_tags:
+                    for e_t in tag_list:
+                        if e_t['name'] == t:
+                            this_tag['last_stored'] = e_t['last_stored']
+                            this_tag['last_store_time'] = e_t['last_store_time']
+                    tags_to_copy.append(this_tag)
+                elif t in existing_handshakes:
+                    for e_h in handshake_list:
+                        if e_h['name'] == t:
+                            this_tag['last_stored'] = e_h['last_stored']
+                            this_tag['last_store_time'] = e_h['last_store_time']
+                    handshakes_to_copy.append(this_tag)
+                else:
+                    if n_t['tag_class_id'] == 5:
+                        tags_to_add.append(this_tag)
+                    elif n_t['tag_class_id'] == 6:
+                        handshakes_to_add.append(this_tag)
+    tag_list = tags_to_add + tags_to_copy
+    handshake_list = handshakes_to_add + handshakes_to_copy
+
+
+def main():
+    """Run the main routine."""
+    global scan_rate, tag_store, device_types, tag_list, handshake_list, save_all
     while True:
-        for tag in tag_store:
-            try:
-                tag_store[tag].read('test')
-            except:
-                print("ERROR EVALUATING {}".format(tag))
-                traceback.print_exc()
-        time.sleep(scan_rate)
+        load_data()
+        # print(tag_list)
+        if len(tag_list + handshake_list) == 0:
+            print("No tags configured. Trying again in 10 seconds.")
+            time.sleep(10)
+            main()
+
+        if len(tag_list) > 0:
+            for i in range(0, len(tag_list)):
+                try:
+                    pos_neg = 1.0
+                    if bool(getrandbits(1)):
+                        pos_neg = -1.0
+                    val = tag_list[i]['last_stored'] + pos_neg * random() * 100.0
+                    now = time.time()
+
+                    store_value = abs(val - tag_list[i]['last_stored']) > tag_list[i]['change_threshold']
+                    store_time = (now - tag_list[i]['last_store_time']) > tag_list[i]['guarantee_sec']
+
+                    if store_value or store_time or (save_all == "true"):
+                        store_reason = ""
+                        if store_time:
+                            store_reason = "time delta = {} > {}".format(now - tag_list[i]['last_store_time'],
+                                                                         tag_list[i]['guarantee_sec'])
+                        elif store_value:
+                            store_reason = "value delta = {} > {}".format(abs(val - tag_list[i]['last_stored']),
+                                                                          tag_list[i]['change_threshold'])
+                        elif save_all == "true":
+                            store_reason = "save all parameter"
+
+                        tag_list[i]['last_stored'] = val
+                        tag_list[i]['last_store_time'] = now
+                        store_tag(tag_list[i])
+
+                        print(
+                            "Stored {} for {} at {} due to {}".format(val, tag_list[i]['name'], now, store_reason))
+                except CommError:
+                    print("CommError: Error connecting to {} for {}".format(tag_list[i]['ip_address'],
+                                                                            tag_list[i]['name']))
+                except TypeError:
+                    print("Error reading {}".format(tag_list[i]['name']))
+
+        if len(handshake_list) > 0:
+            for h in range(0, len(handshake_list)):
+                now = time.time()
+                if (now - handshake_list[h]['last_store_time']) > handshake_list[h]['guarantee_sec']:
+                    try:
+                        handshake_list[h]['last_store_time'] = now
+                        print("Handshake with {} - {} at {}".format(handshake_list[h]['ip_address'],
+                                                                    handshake_list[h]['tag'], now))
+                    except CommError:
+                        print("CommError: Error connecting to {} for {}".format(handshake_list[h]['ip_address'],
+                                                                                handshake_list[h]['name']))
+                    except TypeError:
+                        print("Error writing {}".format(handshake_list[h]['name']))
         time.sleep(scan_rate)
+
 
 if __name__ == '__main__':
     main()
diff --git a/docker-compose-rpi.yml b/docker-compose-rpi.yml
index 593bba5..33cee46 100644
--- a/docker-compose-rpi.yml
+++ b/docker-compose-rpi.yml
@@ -4,12 +4,23 @@ services:
     image: docker.henrypump.cloud/datalogger/rpi-web_db
     ports:
       - "443:5000"
+      - "6603:3306"
+    restart: on-failure
   daq:
     image: docker.henrypump.cloud/datalogger/rpi-daq
     depends_on:
       - web_db
     links:
       - web_db
+    restart: on-failure
+  portainer:
+    image: portainer/portainer
+    command: --templates http://templates/templates.json
+    volumes:
+      - /var/run/docker.sock:/var/run/docker.sock
+    ports:
+      - "9000:9000"
+    restart: on-failure
 
 # networks:
diff --git a/docker-compose-test.yml b/docker-compose-test.yml
new file mode 100644
index 0000000..1ef264c
--- /dev/null
+++ b/docker-compose-test.yml
@@ -0,0 +1,34 @@
+version : '2'
+services:
+  web_db:
+    image: docker.henrypump.cloud/datalogger/web_db
+    ports:
+      - "443:5000"
+      - "6603:3306"
+    restart: on-failure
+  daq:
+    image: docker.henrypump.cloud/datalogger/daq_sample
+    depends_on:
+      - web_db
+    links:
+      - web_db:web_db
+    restart: on-failure
+  portainer:
+    image: portainer/portainer
+    command: --templates http://templates/templates.json
+    volumes:
+      - /var/run/docker.sock:/var/run/docker.sock
+    ports:
+      - "9000:9000"
+    restart: on-failure
+
+
+# networks:
+#   poconsole:
+#     driver: bridge
+#     driver_opts:
+#       com.docker.network.enable_ipv4: "true"
+#     ipam:
+#       driver: default
+#       config:
+#       - subnet: 10.10.10.0/24
diff --git a/docker-compose.yml b/docker-compose.yml
index bd94853..8032ad3 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -4,13 +4,23 @@ services:
     image: docker.henrypump.cloud/datalogger/web_db
     ports:
       - "443:5000"
"443:5000" + - "6603:3306" + restart: on-failure daq: image: docker.henrypump.cloud/datalogger/daq depends_on: - web_db links: - web_db - + restart: on-failure + portainer: + image: portainer/portainer + command: --templates http://templates/templates.json + volumes: + - /var/run/docker.sock:/var/run/docker.sock + ports: + - "9000:9000" + restart: on-failure # networks: # poconsole: diff --git a/web_db/Dockerfile b/web_db/Dockerfile index bbf1982..e7a53d6 100644 --- a/web_db/Dockerfile +++ b/web_db/Dockerfile @@ -5,7 +5,7 @@ COPY mysql-install.sh /tmp/mysql-install.sh RUN chmod +x /tmp/mysql-install.sh && /tmp/mysql-install.sh RUN mkdir /root/tag-logger -COPY flask /root/tag-logger/flask + COPY mysql-connector-python-2.1.4 /tmp/mysql RUN cd /tmp/mysql && python setup.py install && cd ~ @@ -18,6 +18,7 @@ RUN pip install flask flask-restless flask-sqlalchemy pyopenssl RUN apt-get clean RUN rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* +COPY flask /root/tag-logger/flask RUN service mysql restart && python /root/tag-logger/flask/setupdb.py CMD ["/root/startup.sh"] diff --git a/web_db/Dockerfile.ubuntu b/web_db/Dockerfile.ubuntu index bbf1982..b77f972 100644 --- a/web_db/Dockerfile.ubuntu +++ b/web_db/Dockerfile.ubuntu @@ -5,7 +5,7 @@ COPY mysql-install.sh /tmp/mysql-install.sh RUN chmod +x /tmp/mysql-install.sh && /tmp/mysql-install.sh RUN mkdir /root/tag-logger -COPY flask /root/tag-logger/flask + COPY mysql-connector-python-2.1.4 /tmp/mysql RUN cd /tmp/mysql && python setup.py install && cd ~ @@ -18,6 +18,8 @@ RUN pip install flask flask-restless flask-sqlalchemy pyopenssl RUN apt-get clean RUN rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* +COPY flask /root/tag-logger/flask RUN service mysql restart && python /root/tag-logger/flask/setupdb.py CMD ["/root/startup.sh"] + diff --git a/web_db/flask/app/__init__.py b/web_db/flask/app/__init__.py index 041239a..f628756 100644 --- a/web_db/flask/app/__init__.py +++ b/web_db/flask/app/__init__.py @@ -7,8 +7,10 @@ from werkzeug.utils import secure_filename from sqlalchemy import and_ import mysql.connector + +DAQ_HOSTNAME = "daq" UPLOAD_FOLDER = '/root/tag-server/flask/app/docs' -ALLOWED_EXTENSIONS = set(['txt', 'pdf', 'png', 'jpg', 'jpeg', 'gif', 'doc', 'docx', 'xls', 'xlsx', 'zip']) +ALLOWED_EXTENSIONS = {'txt', 'pdf', 'png', 'jpg', 'jpeg', 'gif', 'doc', 'docx', 'xls', 'xlsx', 'zip'} app = Flask('app', static_url_path='') app.config.update( @@ -21,6 +23,7 @@ app.config['MAX_CONTENT_LENGTH'] = 16 * 1024 * 1024 app.secret_key = 'henry_pump' db = SQLAlchemy(app) + def allowed_file(filename): return '.' 
         filename.rsplit('.', 1)[1] in ALLOWED_EXTENSIONS
@@ -34,9 +37,14 @@ def catch_all(path):
 
 from .datalogger import datalogger
 from .datalogger.models import *
 
+
 @app.route('/api/latest')
 def get_latest_tag_vals():
-    res = db.engine.execute('SELECT v1.id as id, v1.created_on as dtime, t.id as t_id, t.name as name, t.tag as tag, v1.value as value, t.units as units, t.description as description, t.min_expected as min_expected, t.max_expected as max_expected FROM tag_vals v1 INNER JOIN tags t ON t.id = v1.tag_id WHERE v1.id = (SELECT v2.id FROM tag_vals v2 WHERE v2.tag_id = v1.tag_id ORDER BY v2.id DESC LIMIT 1) ORDER BY t.id')
+    res = db.engine.execute('SELECT v1.id as id, v1.created_on as dtime, t.id as t_id, t.name as name, t.tag as tag, '
+                            'v1.value as value, t.units as units, t.description as description, '
+                            't.min_expected as min_expected, t.max_expected as max_expected FROM tag_vals v1 '
+                            'INNER JOIN tags t ON t.id = v1.tag_id WHERE v1.id = (SELECT v2.id FROM tag_vals v2 '
+                            'WHERE v2.tag_id = v1.tag_id ORDER BY v2.id DESC LIMIT 1) ORDER BY t.id')
     lat = res.fetchall()
     latest_tags = list(map(latest_to_obj, lat))
     return jsonify(latest_tags)
@@ -45,7 +53,8 @@ def get_latest_tag_vals():
 
 @app.route('/api/valuesbetween/<ids>/<start>/<end>')
 def get_tag_vals_between(ids, start, end):
     ids = ids.split(',')
-    res = Tag_val.query.filter(and_(Tag_val.tag_id.in_(ids), Tag_val.created_on > start, Tag_val.created_on <= end)).all()
+    res = Tag_val.query.filter(and_(Tag_val.tag_id.in_(ids), Tag_val.created_on > start,
+                                    Tag_val.created_on <= end)).all()
     return jsonify([i.serialize for i in res])
@@ -55,21 +64,22 @@
 def get_multiple_tags(ids):
     res = Tag.query.filter(Tag.id.in_(ids)).all()
     return jsonify([i.serialize for i in res])
 
+
 @app.route('/doc/upload', methods=['POST'])
 def upload_file():
     # check if the post request has the file part
     if 'file' not in request.files:
         flash('No file part')
         return redirect("/#/docs")
-    file = request.files['file']
+    upl_file = request.files['file']
     # if user does not select file, browser also
     # submit a empty part without filename
-    if file.filename == '':
+    if upl_file.filename == '':
         flash('No selected file')
         return redirect("/#/docs")
-    if file and allowed_file(file.filename):
-        filename = secure_filename(file.filename)
-        file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))
+    if upl_file and allowed_file(upl_file.filename):
+        filename = secure_filename(upl_file.filename)
+        upl_file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))
         d = Doc(name=filename)
         db.session.add(d)
         db.session.commit()
@@ -77,11 +87,13 @@ def upload_file():
                                 filename=filename))
     return redirect("/#/docs")
 
+
 @app.route('/docs/<filename>')
 def uploaded_file(filename):
     return send_from_directory(app.config['UPLOAD_FOLDER'], filename)
 
+
 @app.route('/csv/all')
 def get_csv_all():
     csv_string = "datetime,"
@@ -95,13 +107,15 @@ def get_csv_all():
     val_objs = [{'value': x['value'], 'tag_name': x['tag']['name'], 'datetime': x['created_on']} for x in all_vals]
     for v in val_objs:
         tag_ind = all_tag_names.index(v['tag_name'])
-        csv_string += "{},".format(v['datetime']) + "," * tag_ind + "{},".format(v['value']) + "," * (len(all_tag_names) - tag_ind) + "\n"
+        csv_string += "{},".format(v['datetime']) + "," * tag_ind + "{},".format(v['value']) + \
+                      "," * (len(all_tag_names) - tag_ind) + "\n"
 
     return Response(
         csv_string,
         mimetype="text/csv",
         headers={"Content-disposition": "attachment; filename=datadump.csv"})
 
+
 @app.route('/csv/<ids>')
 def get_csv_selected(ids):
     csv_string = "datetime,"
@@ -115,9 +129,18 @@ def get_csv_selected(ids):
     val_objs = [{'value': x['value'], 'tag_name': x['tag']['name'], 'datetime': x['created_on']} for x in all_vals]
     for v in val_objs:
         tag_ind = all_tag_names.index(v['tag_name'])
-        csv_string += "{},".format(v['datetime']) + "," * tag_ind + "{},".format(v['value']) + "," * (len(all_tag_names) - tag_ind) + "\n"
+        csv_string += "{},".format(v['datetime']) + "," * tag_ind + "{},".format(v['value']) + \
+                      "," * (len(all_tag_names) - tag_ind) + "\n"
 
     return Response(
         csv_string,
         mimetype="text/csv",
         headers={"Content-disposition":
-                 "attachment; filename=datadump{}.csv".format(ids.replace(",","-"))})
+                 "attachment; filename=datadump{}.csv".format(ids.replace(",", "-"))})
+
+
+@app.route("/logger_status")
+def get_logger_status():
+    ping_response = os.system("ping -c 1 " + DAQ_HOSTNAME)
+    if ping_response == 0:
+        return jsonify({"status": "up"})
+    return jsonify({"status": "down"})
diff --git a/web_db/flask/app/static/js/config.controller.js b/web_db/flask/app/static/js/config.controller.js
index 7fd7f4a..7f274b4 100644
--- a/web_db/flask/app/static/js/config.controller.js
+++ b/web_db/flask/app/static/js/config.controller.js
@@ -49,10 +49,15 @@ poconsole.controller('configCtrl', function($scope, Page, $log, config, devices,
         var checkLoggerStatus = config.getLoggerStatus();
         checkLoggerStatus.then(function(data){
             $scope.loggerLoading = false;
-            $scope.loggerRunning = data.status;
+            console.log("Logger Status = " + data.status);
+            if (data.status == "up"){
+                $scope.loggerRunning = true;
+            } else {
+                $scope.loggerRunning = false;
+            }
         });
     };
-    // $scope.checkLogger();
+    $scope.checkLogger();
 
     $scope.restartLogger = function(){
         var restartLogger = config.restartLogger();
diff --git a/web_db/flask/app/static/js/config.factory.js b/web_db/flask/app/static/js/config.factory.js
index 23df2e3..183a2a2 100644
--- a/web_db/flask/app/static/js/config.factory.js
+++ b/web_db/flask/app/static/js/config.factory.js
@@ -50,7 +50,6 @@ poconsole.factory('config',function($q, $http, $log){
         var deferred = $q.defer();
         $http.get('/logger_status').success(function(data) {
             deferred.resolve({
-                pid:data.pid,
                 status: data.status
             });
         });
diff --git a/web_db/flask/app/static/js/tags.controller.js b/web_db/flask/app/static/js/tags.controller.js
index e566edb..6279f95 100644
--- a/web_db/flask/app/static/js/tags.controller.js
+++ b/web_db/flask/app/static/js/tags.controller.js
@@ -23,7 +23,6 @@ poconsole.controller('tagsCtrl', function($scope, $route, $http, $routeParams, P
     $scope.submitAddTag = function(){
         var createStatus = tags.createTag($scope.newTag);
-        $scope.createStatus = createStatus.status;
         $scope.loadTagList();
     };
diff --git a/web_db/flask/app/static/templates/config.html b/web_db/flask/app/static/templates/config.html
index d49a8bb..1e1e85e 100644
--- a/web_db/flask/app/static/templates/config.html
+++ b/web_db/flask/app/static/templates/config.html
@@ -48,7 +48,7 @@
     Checking Logger Status...
 [the HTML markup for this one-line change was lost in extraction and is not reproduced here]
diff --git a/web_db/startup.sh b/web_db/startup.sh
index 2d1ed77..56e72ba 100644
--- a/web_db/startup.sh
+++ b/web_db/startup.sh
@@ -1,3 +1,5 @@
+#!/bin/bash
+
 service mysql start
 sleep 5
 python /root/tag-logger/flask/run.py