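"""Flask routes for the datalogger web app: JSON APIs for logged tag
values and stroke card records, CSV exports, document upload/download,
and PLC tag backup/restore via pycomm_helper."""
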
import os
import json

from flask import request, send_from_directory, jsonify, url_for, flash, redirect, Response
from werkzeug.utils import secure_filename
from sqlalchemy import and_, desc
from sqlalchemy.sql import func

from app import app, db
from app.datalogger.models import *
from app.datalogger.getDailyTotals import getTotals
from pycomm_helper.utils import readTag, writeTag
from random import random  # kept for the commented-out simulation fallbacks below


ALLOWED_EXTENSIONS = {'txt', 'pdf', 'png', 'jpg', 'jpeg', 'gif', 'doc', 'docx', 'xls', 'xlsx', 'zip'}


def allowed_file(filename):
    """Return True if filename has an extension in ALLOWED_EXTENSIONS."""
    return '.' in filename and \
        filename.rsplit('.', 1)[1].lower() in ALLOWED_EXTENSIONS


def latest_to_obj(tup):
    """Convert a row from the /api/latest query into a JSON-serializable dict."""
    return {
        'id': tup[0],
        'datetime': str(tup[1]),
        'tag_id': str(tup[2]),
        'tag_name': str(tup[3]),
        'tag': str(tup[4]),
        'value': tup[5],
        'units': tup[6],
        'tag_description': str(tup[7]),
        'min_expected': tup[8],
        'max_expected': tup[9],
    }


def tagsattime_to_obj(tup):
    """Convert a row from the /api/tagsattime query into a JSON-serializable dict."""
    return {
        '_id': tup[0],
        'value': tup[1],
        'created_on': str(tup[2]),
        'tag_name': str(tup[3]),
        'min_expected': tup[4],
        'max_expected': tup[5],
        'units': tup[6],
    }


@app.route('/', defaults={'path': ''})
@app.route('/<path:path>')
def catch_all(path):
    """Serve the single-page app; client-side routing handles the path."""
    return app.send_static_file('index.html')


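# /api/latest returns the most recent logged value for every tag: the
# correlated subquery picks, per tag_id, the newest row in tag_vals.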
@app.route('/api/latest')
def get_latest_tag_vals():
    res = db.engine.execute(
        'SELECT v1._id as id, v1.created_on as dtime, t._id as t_id, t.name as name, '
        't.tag as tag, v1.value as value, t.units as units, t.description as description, '
        't.min_expected as min_expected, t.max_expected as max_expected '
        'FROM tag_vals v1 '
        'INNER JOIN tags t ON t._id = v1.tag_id '
        'WHERE v1._id = (SELECT v2._id FROM tag_vals v2 WHERE v2.tag_id = v1.tag_id '
        'ORDER BY v2._id DESC LIMIT 1) '
        'ORDER BY t._id')
    latest_tags = list(map(latest_to_obj, res.fetchall()))
    return jsonify(latest_tags)


@app.route('/api/valuesbetween/<string:ids>/<string:start>/<string:end>')
def get_tag_vals_between(ids, start, end):
    ids = ids.split(',')
    res = Tag_val.query.filter(and_(Tag_val.tag_id.in_(ids),
                                    Tag_val.created_on > start,
                                    Tag_val.created_on <= end)).all()
    return jsonify([i.serialize for i in res])


@app.route('/api/multipletags/<string:ids>')
def get_multiple_tags(ids):
    ids = ids.split(',')
    res = Tag.query.filter(Tag._id.in_(ids)).all()
    return jsonify([i.serialize for i in res])


@app.route('/doc/upload', methods=['POST'])
def upload_file():
    # check that the POST request has the file part
    if 'file' not in request.files:
        flash('No file part')
        return redirect("/#/docs")
    file = request.files['file']
    # if the user does not select a file, the browser submits an
    # empty part without a filename
    if file.filename == '':
        flash('No selected file')
        return redirect("/#/docs")
    if file and allowed_file(file.filename):
        filename = secure_filename(file.filename)
        file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))
        d = Doc(name=filename)
        db.session.add(d)
        db.session.commit()
        return redirect(url_for('uploaded_file', filename=filename))
    return redirect("/#/docs")


@app.route('/docs/<filename>')
def uploaded_file(filename):
    return send_from_directory(app.config['UPLOAD_FOLDER'], filename)


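# The CSV exports emit a header row of tag names, then one row per
# logged value, with the value placed under its tag's column and the
# other columns left empty.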
@app.route('/csv/all')
def get_csv_all():
    csv_string = "datetime,"
    all_tags = [i.serialize for i in Tag.query.all()]
    all_tag_names = [x['name'] for x in all_tags]
    for x in all_tag_names:
        csv_string += "{},".format(x)
    csv_string += "\n"

    all_vals = [i.serialize for i in Tag_val.query.all()]
    val_objs = [{'value': x['value'], 'tag_name': x['tag']['name'], 'datetime': x['created_on']} for x in all_vals]
    for v in val_objs:
        tag_ind = all_tag_names.index(v['tag_name'])
        # pad with empty columns so the value lands under its tag's header
        csv_string += "{},".format(v['datetime']) + "," * tag_ind + "{}".format(v['value']) + "," * (len(all_tag_names) - tag_ind) + "\n"
    return Response(
        csv_string,
        mimetype="text/csv",
        headers={"Content-disposition":
                 "attachment; filename=datadump.csv"})


@app.route('/csv/<string:ids>')
def get_csv_selected(ids):
    id_list = ids.split(',')
    csv_string = "datetime,"
    all_tags = [i.serialize for i in Tag.query.filter(Tag._id.in_(id_list)).all()]
    all_tag_names = [x['name'] for x in all_tags]
    for x in all_tag_names:
        csv_string += "{},".format(x)
    csv_string += "\n"

    all_vals = [i.serialize for i in Tag_val.query.filter(Tag_val.tag_id.in_(id_list)).all()]
    val_objs = [{'value': x['value'], 'tag_name': x['tag']['name'], 'datetime': x['created_on']} for x in all_vals]
    for v in val_objs:
        tag_ind = all_tag_names.index(v['tag_name'])
        csv_string += "{},".format(v['datetime']) + "," * tag_ind + "{}".format(v['value']) + "," * (len(all_tag_names) - tag_ind) + "\n"
    return Response(
        csv_string,
        mimetype="text/csv",
        headers={"Content-disposition":
                 "attachment; filename=datadump{}.csv".format(ids.replace(",", "-"))})


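# SQLite-specific: DATE(col, 'localtime') interprets the stored
# timestamp as UTC and converts it to the local date.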
@app.route('/api/card_dates')
def get_card_dates():
    query = 'SELECT DISTINCT(DATE("created_on", "localtime")) FROM cards'
    res = db.engine.execute(query).fetchall()
    return jsonify([str(i[0]) for i in res])


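# Cards recorded on a given local date, newest first, 20 per page.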
@app.route('/api/cardsbydate/<string:datepar>', defaults={'page': 1})
@app.route('/api/cardsbydate/<string:datepar>/<int:page>')
def get_cardsbydate(datepar, page):
    res = Card.query.with_entities(Card._id, Card.stroke_number, Card.stroke_type, Card.created_on) \
        .filter(func.date(Card.created_on, 'localtime') == datepar) \
        .order_by(desc(Card.created_on)) \
        .paginate(page=page, per_page=20, error_out=False)
    # created_on serializes like: Mon, 14 Nov 2016 19:46:09 GMT
    return jsonify({
        'cards': [{'_id': i[0], 'stroke_number': i[1], 'stroke_type': i[2], 'created_on': i[3]} for i in res.items],
        'num_pages': res.pages, 'per_page': res.per_page, 'total': res.total})


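# For each tag, the inner query finds the newest created_on at or before
# the requested time; the joins then pull that row's value and metadata.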
@app.route('/api/tagsattime/<string:dtime>')
def get_tags_at_time(dtime):
    query = ("SELECT v._id, v.value, v.created_on, t.name, t.max_expected, t.min_expected, t.units "
             "FROM (SELECT tag_id, MAX(created_on) created_on FROM tag_vals "
             "WHERE created_on <= ? GROUP BY tag_id) v0 "
             "JOIN tag_vals v ON v0.tag_id = v.tag_id AND v0.created_on = v.created_on "
             "JOIN tags t ON t._id = v.tag_id")
    # parameterized to avoid interpolating the raw URL value into the SQL;
    # the qmark placeholder assumes the SQLite backend used elsewhere here
    res = db.engine.execute(query, (dtime,))
    tag_data_list = list(map(tagsattime_to_obj, res.fetchall()))
    return jsonify(tag_data_list)


@app.route('/api/today_totals')
def today_totals():
    return jsonify(getTotals())


@app.route('/api/csv_stroke/<int:stroke_number>')
def get_csv_for_stroke(stroke_number):
    """Return a CSV of the given card (the route argument is matched against Card._id)."""
    res = Card.query.filter(Card._id == stroke_number).one()
    # the position/load arrays are stored as JSON strings
    res.surf_pos = json.loads(res.surf_pos)
    res.surf_lod = json.loads(res.surf_lod)
    res.down_pos = json.loads(res.down_pos)
    res.down_lod = json.loads(res.down_lod)
    nl = "\n"
    csv_string = ""
    csv_string += "Card_ID,{}".format(res._id) + nl
    csv_string += "datetime,{}".format(res.created_on) + nl
    csv_string += nl
    csv_string += "Surface Position,Surface Load" + nl
    for i in range(len(res.surf_pos)):
        csv_string += "{},{}".format(res.surf_pos[i], res.surf_lod[i]) + nl
    csv_string += nl
    csv_string += "Downhole Position,Downhole Load" + nl
    for i in range(len(res.down_pos)):
        csv_string += "{},{}".format(res.down_pos[i], res.down_lod[i]) + nl
    return Response(
        csv_string,
        mimetype="text/csv",
        headers={"Content-disposition":
                 "attachment; filename=card_{}.csv".format(res._id)})


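# Backup reads each configured tag's current value from its PLC via
# readTag and stores it in the BackupRestore table; the restore routes
# below write the stored values back out.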
@app.route('/api/backup_all')
def backup_all():
    all_br = [i.serialize for i in BackupRestore.query.all()]
    for br in all_br:
        try:
            read_value = readTag(br['device']['address'], br['tag'])
            # read_value = (random() * 100.0, 'REAL')  # simulation fallback
            br['value'] = read_value[0]
            br['tag_type'] = read_value[1]
            BackupRestore.query.filter_by(_id=br['_id']).update(
                {"value": str(br['value']), "tag_type": br['tag_type']})
            db.session.commit()
        except Exception:
            print("Error backing up tag value for tag {}".format(br['tag']))
            continue
    return jsonify(all_br)


@app.route('/api/backup/<int:id>')
def backup_single(id):
    """Back up the PLC tag value of the specified database entry."""
    single_br = BackupRestore.query.filter_by(_id=id).one().serialize
    try:
        read_value = readTag(single_br['device']['address'], single_br['tag'])
        # read_value = (random() * 100.0, 'REAL')  # simulation fallback
        single_br['value'] = read_value[0]
        single_br['tag_type'] = read_value[1]
        BackupRestore.query.filter_by(_id=single_br['_id']).update(
            {"value": str(single_br['value']), "tag_type": single_br['tag_type']})
        db.session.commit()
    except Exception:
        print("Error backing up tag value for tag {}".format(single_br['tag']))
    return jsonify(single_br)


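# Restore converts each stored string back to its native type
# (REAL -> float, otherwise int) before writing it to the PLC.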
@app.route('/api/restore_all')
def restore_all():
    all_br = [i.serialize for i in BackupRestore.query.all()]
    for br in all_br:
        try:
            if br['tag_type'] == "REAL":
                tag_val = float(br['value'])
            else:
                tag_val = int(br['value'])

            write_value = writeTag(br['device']['address'], br['tag'], tag_val)
        except Exception:
            print("Error restoring tag value for tag {}".format(br['tag']))
            continue
    return jsonify(all_br)


@app.route('/api/restore/<int:id>')
def restore_single(id):
    """Write the stored tag value of the specified database entry back to the PLC."""
    single_br = BackupRestore.query.filter_by(_id=id).one().serialize
    try:
        if single_br['tag_type'] == "REAL":
            tag_val = float(single_br['value'])
        else:
            tag_val = int(single_br['value'])

        write_value = writeTag(single_br['device']['address'], single_br['tag'], tag_val)
    except Exception:
        print("Error restoring tag value for tag {}".format(single_br['tag']))
    return jsonify(single_br)