DataLogger-Generic/web_db/flask/app/__init__.py
Patrick McDonagh 5d250bfac4 Lots of fixes, docker optimization
- fixes status check for logger
- adds ability to test sample data
- adds PLC Handshaking capability
- adds portainer as container manager
- exposes mysql port for reading database (as 6603)
2017-05-10 18:09:10 -05:00

# project/__init__.py
import os

from flask import Flask, render_template, request, session, send_from_directory, jsonify, url_for, flash, redirect, Response
from flask_sqlalchemy import SQLAlchemy
from werkzeug.utils import secure_filename
from sqlalchemy import and_
import mysql.connector

DAQ_HOSTNAME = "daq"
UPLOAD_FOLDER = '/root/tag-server/flask/app/docs'
ALLOWED_EXTENSIONS = {'txt', 'pdf', 'png', 'jpg', 'jpeg', 'gif', 'doc', 'docx', 'xls', 'xlsx', 'zip'}

app = Flask('app', static_url_path='')
app.config.update(
    DEBUG=True,
    SQLALCHEMY_DATABASE_URI='mysql+mysqlconnector://website:henrypump@127.0.0.1/poconsole'
    # SQLALCHEMY_DATABASE_URI='sqlite:///../database.db',
)
app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER
app.config['MAX_CONTENT_LENGTH'] = 16 * 1024 * 1024
app.secret_key = 'henry_pump'

db = SQLAlchemy(app)
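
# Illustrative sketch, not part of the original file: the commit message above notes
# that the MySQL port is exposed (as 6603) for reading the database directly. A
# minimal read-only query using the mysql.connector package imported above might
# look like the helper below; host, port, and credentials are assumptions taken from
# the SQLALCHEMY_DATABASE_URI and would point at the Docker host in practice.
def _example_read_exposed_mysql(host='127.0.0.1'):
    conn = mysql.connector.connect(host=host, port=6603,
                                   user='website', password='henrypump',
                                   database='poconsole')
    cur = conn.cursor()
    cur.execute('SELECT COUNT(*) FROM tag_vals')  # count of logged samples
    (count,) = cur.fetchone()
    conn.close()
    return count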
def allowed_file(filename):
    return '.' in filename and \
        filename.rsplit('.', 1)[1] in ALLOWED_EXTENSIONS


@app.route('/', defaults={'path': ''})
@app.route('/<path:path>')
def catch_all(path):
    # Serve the single-page front end for any route not handled below.
    return app.send_static_file('index.html')
# Blueprint and ORM models are defined in the datalogger package.
from .datalogger import datalogger
from .datalogger.models import *


@app.route('/api/latest')
def get_latest_tag_vals():
    # For each tag, fetch its most recent value via a correlated subquery.
    res = db.engine.execute('SELECT v1.id as id, v1.created_on as dtime, t.id as t_id, t.name as name, t.tag as tag, '
                            'v1.value as value, t.units as units, t.description as description, '
                            't.min_expected as min_expected, t.max_expected as max_expected FROM tag_vals v1 '
                            'INNER JOIN tags t ON t.id = v1.tag_id WHERE v1.id = (SELECT v2.id FROM tag_vals v2 '
                            'WHERE v2.tag_id = v1.tag_id ORDER BY v2.id DESC LIMIT 1) ORDER BY t.id')
    lat = res.fetchall()
    latest_tags = list(map(latest_to_obj, lat))
    return jsonify(latest_tags)
@app.route('/api/valuesbetween/<string:ids>/<string:start>/<string:end>')
def get_tag_vals_between(ids, start, end):
    ids = ids.split(',')
    res = Tag_val.query.filter(and_(Tag_val.tag_id.in_(ids), Tag_val.created_on > start,
                                    Tag_val.created_on <= end)).all()
    return jsonify([i.serialize for i in res])


@app.route('/api/multipletags/<string:ids>')
def get_multiple_tags(ids):
    ids = ids.split(',')
    res = Tag.query.filter(Tag.id.in_(ids)).all()
    return jsonify([i.serialize for i in res])
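
# Illustrative sketch, not part of the original file: exercising the tag API routes
# above from an HTTP client. Assumes the `requests` package and a development server
# listening on localhost:5000; the tag ids and date range are made up.
def _example_query_tag_api():
    import requests
    latest = requests.get('http://localhost:5000/api/latest').json()
    tags = requests.get('http://localhost:5000/api/multipletags/1,2').json()
    between = requests.get('http://localhost:5000/api/valuesbetween/1,2/2017-05-01/2017-05-10').json()
    return latest, tags, between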
@app.route('/doc/upload', methods=['POST'])
def upload_file():
    # check if the post request has the file part
    if 'file' not in request.files:
        flash('No file part')
        return redirect("/#/docs")
    upl_file = request.files['file']
    # if the user does not select a file, the browser
    # submits an empty part without a filename
    if upl_file.filename == '':
        flash('No selected file')
        return redirect("/#/docs")
    if upl_file and allowed_file(upl_file.filename):
        filename = secure_filename(upl_file.filename)
        upl_file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))
        d = Doc(name=filename)
        db.session.add(d)
        db.session.commit()
        return redirect(url_for('uploaded_file', filename=filename))
    return redirect("/#/docs")


@app.route('/docs/<filename>')
def uploaded_file(filename):
    return send_from_directory(app.config['UPLOAD_FOLDER'], filename)
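
# Illustrative sketch, not part of the original file: uploading a document to the
# /doc/upload route above. Assumes the `requests` package, a server on
# localhost:5000, and a hypothetical local file named 'manual.pdf'.
def _example_upload_doc():
    import requests
    with open('manual.pdf', 'rb') as f:
        # The form field name must be 'file' to match request.files['file'].
        return requests.post('http://localhost:5000/doc/upload', files={'file': f})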
@app.route('/csv/all')
def get_csv_all():
    # Build the header row: datetime followed by one column per tag.
    csv_string = "datetime,"
    all_tags = [i.serialize for i in Tag.query.all()]
    all_tag_names = [x['name'] for x in all_tags]
    for x in all_tag_names:
        csv_string += "{},".format(x)
    csv_string += "\n"
    all_vals = [i.serialize for i in Tag_val.query.all()]
    val_objs = [{'value': x['value'], 'tag_name': x['tag']['name'], 'datetime': x['created_on']} for x in all_vals]
    for v in val_objs:
        # One row per logged value, padded with commas so it lands in its tag's column.
        tag_ind = all_tag_names.index(v['tag_name'])
        csv_string += "{},".format(v['datetime']) + "," * tag_ind + "{},".format(v['value']) + \
                      "," * (len(all_tag_names) - tag_ind) + "\n"
    return Response(
        csv_string,
        mimetype="text/csv",
        headers={"Content-disposition": "attachment; filename=datadump.csv"})
@app.route('/csv/<string:ids>')
def get_csv_selected(ids):
    # Split the comma-separated ids so in_() matches whole ids, not single characters.
    id_list = ids.split(',')
    csv_string = "datetime,"
    all_tags = [i.serialize for i in Tag.query.filter(Tag.id.in_(id_list)).all()]
    all_tag_names = [x['name'] for x in all_tags]
    for x in all_tag_names:
        csv_string += "{},".format(x)
    csv_string += "\n"
    all_vals = [i.serialize for i in Tag_val.query.filter(Tag_val.tag_id.in_(id_list)).all()]
    val_objs = [{'value': x['value'], 'tag_name': x['tag']['name'], 'datetime': x['created_on']} for x in all_vals]
    for v in val_objs:
        tag_ind = all_tag_names.index(v['tag_name'])
        csv_string += "{},".format(v['datetime']) + "," * tag_ind + "{},".format(v['value']) + \
                      "," * (len(all_tag_names) - tag_ind) + "\n"
    return Response(
        csv_string,
        mimetype="text/csv",
        headers={"Content-disposition":
                 "attachment; filename=datadump{}.csv".format(ids.replace(",", "-"))})
@app.route("/logger_status")
def get_logger_status():
ping_response = os.system("ping -c 1 " + DAQ_HOSTNAME)
if ping_response == 0:
return jsonify({"status": "up"})
return jsonify({"status": "down"})