Fixes to comply with PEP 8
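
For context, the diff below mostly applies a handful of PEP 8 rules: whitespace after ':' and ',' inside literals and argument lists (pycodestyle E231), whitespace around operators such as '==' and '=' (E225), and exactly two blank lines around top-level definitions (E302/E303). A minimal before/after sketch, with made-up names that are not taken from the diff itself:

# Illustrative sketch only; these names are hypothetical, not code from this commit.

# Before (flagged by pycodestyle):
#     point = {'name':"Example",'min':0, 'max':100}   # E231: missing whitespace after ':' / ','
#     same = left_id==right_id                        # E225: missing whitespace around operator
# After:
point = {'name': "Example", 'min': 0, 'max': 100}
left_id = right_id = 1
same = left_id == right_id


def two_blank_lines_above(values):
    # E302/E303: exactly two blank lines separate top-level definitions.
    return [v for v in values if v]
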
@@ -1 +1 @@
-pass
+pass

@@ -5,11 +5,13 @@ import math
 from app.datalogger.models import Device
 from app import db
 
+
 def getMainPLC():
     dev = Device.query.first()
     dev_obj = dev.serialize
     return dev_obj
 
+
 def readTag(addr, tag):
     c = ClxDriver()
     if c.open(addr):
@@ -22,26 +24,27 @@ def readTag(addr, tag):
             traceback.print_exc()
         c.close()
 
+
 def getTotals():
     today_tags = [
-        {'name':"Average SPM",'tag':"TODAY_Average_SPM", 'min':0, 'max': 20, 'units':'SPM'},
-        {'name':"Downhole Net Stroke",'tag':"TODAY_Downhole_NetStroke", 'min':0, 'max':150, 'units':'in'},
-        {'name':"Electricity Cost",'tag':"TODAY_Electricity_Cost", 'min':0, 'max':100, 'units':'$'},
-        {'name':"Fluid Level",'tag':"TODAY_Fluid_Above_Pump", 'min':0, 'max':10000, 'units':'ft'},
-        {'name':"Inflow Rate",'tag':"TODAY_Inflow_Rate", 'min':0, 'max':100, 'units':'BBL/day'},
-        {'name':"Energy Used",'tag':"TODAY_kWh", 'min':0, 'max':100, 'units':'kWh'},
-        {'name':"Energy Regen",'tag':"TODAY_kWh_Regen", 'min':0, 'max':100, 'units':'kWh'},
-        {'name':"Lifting Cost",'tag':"TODAY_Lifting_Cost", 'min':0, 'max':100, 'units':'$'},
-        {'name':"Peak Load",'tag':"TODAY_Max_Load", 'min':0, 'max':50000, 'units':'lbs'},
-        {'name':"Min Load",'tag':"TODAY_Min_Load", 'min':0, 'max':50000, 'units':'lbs'},
-        {'name':"Percent Run",'tag':"TODAY_Percent_Run", 'min':0, 'max':100, 'units':'%'},
-        {'name':"Polished Rod HP",'tag':"TODAY_Polished_Rod_HP", 'min':0, 'max':25, 'units':'HP'},
-        {'name':"Calculated Production",'tag':"TODAY_Production_Calculated", 'min':0, 'max':500, 'units':'BBL'},
-        {'name':"Projected Production",'tag':"TODAY_Production_Projected", 'min':0, 'max':500, 'units':'BBL'},
-        {'name':"Pump HP",'tag':"TODAY_Pump_HP", 'min':0, 'max':25, 'units':'HP'},
-        {'name':"Pump Intake Presure",'tag':"TODAY_Pump_Intake_Pressure", 'min':0, 'max':5000, 'units':'PSI'},
-        {'name':"Surface Stroke Length",'tag':"TODAY_Surface_StrokeLength", 'min':0, 'max':150, 'units':'in'},
-        {'name':"Tubing Movement",'tag':"TODAY_Tubing_Movement", 'min':0, 'max':150, 'units':'in'}
+        {'name': "Average SPM", 'tag': "TODAY_Average_SPM", 'min': 0, 'max': 20, 'units': 'SPM'},
+        {'name': "Downhole Net Stroke", 'tag': "TODAY_Downhole_NetStroke", 'min': 0, 'max': 150, 'units': 'in'},
+        {'name': "Electricity Cost", 'tag': "TODAY_Electricity_Cost", 'min': 0, 'max': 100, 'units': '$'},
+        {'name': "Fluid Level", 'tag': "TODAY_Fluid_Above_Pump", 'min': 0, 'max': 10000, 'units': 'ft'},
+        {'name': "Inflow Rate", 'tag': "TODAY_Inflow_Rate", 'min': 0, 'max': 100, 'units': 'BBL/day'},
+        {'name': "Energy Used", 'tag': "TODAY_kWh", 'min': 0, 'max': 100, 'units': 'kWh'},
+        {'name': "Energy Regen", 'tag': "TODAY_kWh_Regen", 'min': 0, 'max': 100, 'units': 'kWh'},
+        {'name': "Lifting Cost", 'tag': "TODAY_Lifting_Cost", 'min': 0, 'max': 100, 'units': '$'},
+        {'name': "Peak Load", 'tag': "TODAY_Max_Load", 'min': 0, 'max': 50000, 'units': 'lbs'},
+        {'name': "Min Load", 'tag': "TODAY_Min_Load", 'min': 0, 'max': 50000, 'units': 'lbs'},
+        {'name': "Percent Run", 'tag': "TODAY_Percent_Run", 'min': 0, 'max': 100, 'units': '%'},
+        {'name': "Polished Rod HP", 'tag': "TODAY_Polished_Rod_HP", 'min': 0, 'max': 25, 'units': 'HP'},
+        {'name': "Calculated Production", 'tag': "TODAY_Production_Calculated", 'min': 0, 'max': 500, 'units': 'BBL'},
+        {'name': "Projected Production", 'tag': "TODAY_Production_Projected", 'min': 0, 'max': 500, 'units': 'BBL'},
+        {'name': "Pump HP", 'tag': "TODAY_Pump_HP", 'min': 0, 'max': 25, 'units': 'HP'},
+        {'name': "Pump Intake Presure", 'tag': "TODAY_Pump_Intake_Pressure", 'min': 0, 'max': 5000, 'units': 'PSI'},
+        {'name': "Surface Stroke Length", 'tag': "TODAY_Surface_StrokeLength", 'min': 0, 'max': 150, 'units': 'in'},
+        {'name': "Tubing Movement", 'tag': "TODAY_Tubing_Movement", 'min': 0, 'max': 150, 'units': 'in'}
     ]
 
     main_plc = getMainPLC()
@@ -50,7 +53,7 @@ def getTotals():
         try:
             val = readTag(main_plc['address'], tag['tag'])[0]
             if not math.isnan(val):
-                outList.append({'name':tag['name'], 'value':val, 'max':tag['max'], 'min': tag['min'], 'units': tag['units']})
+                outList.append({'name': tag['name'], 'value': val, 'max': tag['max'], 'min': tag['min'], 'units': tag['units']})
         except Exception as e:
             print("Error while reading total: {}".format(e))
 

@@ -22,6 +22,7 @@ class Config(db.Model):
             "updated_on": self.updated_on,
         }
 
+
 class Device_type(db.Model):
     __tablename__ = "device_types"
     _id = db.Column(db.Integer, primary_key=True)
@@ -43,7 +44,7 @@ class Device(db.Model):
     __tablename__ = "devices"
     _id = db.Column(db.Integer, primary_key=True)
     device_type_id = db.Column(db.Integer, db.ForeignKey('device_types._id'))
-    device_type = db.relationship(Device_type, primaryjoin=device_type_id==Device_type._id)
+    device_type = db.relationship(Device_type, primaryjoin=device_type_id == Device_type._id)
     address = db.Column(db.String(256))
     created_on = db.Column(db.DateTime(), default=datetime.utcnow)
     updated_on = db.Column(db.DateTime(), default=datetime.utcnow, onupdate=datetime.utcnow)
@@ -79,8 +80,6 @@ class Doc(db.Model):
         }
 
 
-
-
 class Tag(db.Model):
     __tablename__ = "tags"
     _id = db.Column(db.Integer, primary_key=True)
@@ -100,7 +99,6 @@ class Tag(db.Model):
     created_on = db.Column(db.DateTime(), default=datetime.utcnow)
     updated_on = db.Column(db.DateTime(), default=datetime.utcnow, onupdate=datetime.utcnow)
 
-
     @property
     def serialize(self):
         return {
@@ -143,6 +141,7 @@ class Tag_val(db.Model):
             "updated_on": self.updated_on
         }
 
+
 class Card(db.Model):
     __tablename__ = "cards"
     _id = db.Column(db.Integer, primary_key=True)
@@ -158,7 +157,6 @@ class Card(db.Model):
     # def __repr__(self):
     #     return self.serialize
 
-
     @property
     def serialize(self):
         json_surf_pos = json.loads(self.surf_pos)
@@ -178,7 +176,6 @@ class Card(db.Model):
         }
 
 
-
 class GaugeOffVal(db.Model):
     __tablename__ = "gauge_off"
     _id = db.Column(db.Integer, primary_key=True)
@@ -231,6 +228,7 @@ class GaugeOffVal(db.Model):
             "updated_on": self.updated_on,
         }
 
+
 class WellTest(db.Model):
     __tablename__ = "well_tests"
     _id = db.Column(db.Integer, primary_key=True)
@@ -263,6 +261,7 @@ class WellTest(db.Model):
             "updated_on": self.updated_on,
         }
 
+
 class Note(db.Model):
     __tablename__ = "notes"
     _id = db.Column(db.Integer, primary_key=True)
@@ -282,7 +281,6 @@ class Note(db.Model):
         }
 
 
-
 class EventConfig(db.Model):
     __tablename__ = "event_configs"
     _id = db.Column(db.Integer, primary_key=True)
@@ -332,9 +330,10 @@ class Event(db.Model):
             "updated_on": self.updated_on,
         }
 
+
 class RunStatus(db.Model):
     __tablename__ = "run_status_log"
-    _id =db.Column(db.Integer, primary_key=True)
+    _id = db.Column(db.Integer, primary_key=True)
     run_status = db.Column(db.String(64))
     created_on = db.Column(db.DateTime(), default=datetime.utcnow)
     updated_on = db.Column(db.DateTime(), default=datetime.utcnow, onupdate=datetime.utcnow)

@@ -13,10 +13,12 @@ from app.datalogger.getDailyTotals import getTotals
 
 ALLOWED_EXTENSIONS = set(['txt', 'pdf', 'png', 'jpg', 'jpeg', 'gif', 'doc', 'docx', 'xls', 'xlsx', 'zip'])
 
+
 def allowed_file(filename):
     return '.' in filename and \
         filename.rsplit('.', 1)[1] in ALLOWED_EXTENSIONS
 
+
 def latest_to_obj(tup):
     ob = {}
     ob['id'] = tup[0]
@@ -31,6 +33,7 @@ def latest_to_obj(tup):
     ob['max_expected'] = tup[9]
     return ob
 
+
 def tagsattime_to_obj(tup):
     ob = {}
     ob['_id'] = tup[0]
@@ -70,6 +73,7 @@ def get_multiple_tags(ids):
     res = Tag.query.filter(Tag._id.in_(ids)).all()
     return jsonify([i.serialize for i in res])
 
+
 @app.route('/doc/upload', methods=['POST'])
 def upload_file():
     # check if the post request has the file part
@@ -92,11 +96,13 @@ def upload_file():
                                     filename=filename))
     return redirect("/#/docs")
 
+
 @app.route('/docs/<filename>')
 def uploaded_file(filename):
     return send_from_directory(app.config['UPLOAD_FOLDER'],
                                filename)
 
+
 @app.route('/csv/all')
 def get_csv_all():
     csv_string = "datetime,"
@@ -117,6 +123,7 @@ def get_csv_all():
         headers={"Content-disposition":
                  "attachment; filename=datadump.csv"})
 
+
 @app.route('/csv/<string:ids>')
 def get_csv_selected(ids):
     csv_string = "datetime,"
@@ -135,7 +142,8 @@ def get_csv_selected(ids):
         csv_string,
         mimetype="text/csv",
         headers={"Content-disposition":
-                 "attachment; filename=datadump{}.csv".format(ids.replace(",","-"))})
+                 "attachment; filename=datadump{}.csv".format(ids.replace(",", "-"))})
 
+
 @app.route('/api/card_dates')
 def get_card_dates():
@@ -145,14 +153,15 @@ def get_card_dates():
     return jsonify([str(i[0]) for i in res])
     # return jsonify([i.serialize for i in res])
 
-@app.route('/api/cardsbydate/<string:datepar>', defaults={'page':1})
+
+@app.route('/api/cardsbydate/<string:datepar>', defaults={'page': 1})
 @app.route('/api/cardsbydate/<string:datepar>/<int:page>')
 def get_cardsbydate(datepar, page):
-    res = Card.query.with_entities(Card._id, Card.stroke_number, Card.stroke_type, Card.created_on).filter(func.date(Card.created_on, 'localtime') == datepar).order_by(desc(Card.created_on)).paginate(page=page,per_page=20, error_out=False)
+    res = Card.query.with_entities(Card._id, Card.stroke_number, Card.stroke_type, Card.created_on).filter(func.date(Card.created_on, 'localtime') == datepar).order_by(desc(Card.created_on)).paginate(page=page, per_page=20, error_out=False)
     # Mon, 14 Nov 2016 19:46:09 GMT
     return jsonify({
-        'cards':[{'_id': i[0], 'stroke_number': i[1], 'stroke_type': i[2], 'created_on': i[3]} for i in res.items],
-        'num_pages':res.pages, 'per_page': res.per_page, 'total':res.total})
+        'cards': [{'_id': i[0], 'stroke_number': i[1], 'stroke_type': i[2], 'created_on': i[3]} for i in res.items],
+        'num_pages': res.pages, 'per_page': res.per_page, 'total': res.total})
 
 
 @app.route('/api/tagsattime/<string:datetime>')
@@ -163,6 +172,7 @@ def get_tags_at_time(datetime):
     tag_data_list = list(map(tagsattime_to_obj, tag_data))
     return jsonify(tag_data_list)
 
+
 @app.route('/api/today_totals')
 def today_totals():
     return jsonify(getTotals())

@@ -7,6 +7,7 @@ REQ_TARGET = "localhost"
 REQ_PORT = 5000
 REQ_URL_BASE = "{}://{}:{}/api".format(REQ_METHOD, REQ_TARGET, REQ_PORT)
 
+
 def insert_data(db_table, test_data):
     global REQ_URL_BASE
     REQ_URL = "{}/{}".format(REQ_URL_BASE, db_table)
@@ -15,6 +16,7 @@ def insert_data(db_table, test_data):
     input_id = post_res["_id"]
     return input_id
 
+
 def get_data(db_table, obj_id):
     global REQ_URL_BASE
     REQ_URL = "{}/{}/{}".format(REQ_URL_BASE, db_table, obj_id)
@@ -221,6 +223,5 @@ class TestStroke(unittest.TestCase):
         # self.assertTrue(test_note[x] == data_in_db[x])
 
 
-
 if __name__ == '__main__':
     unittest.main()
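
To re-check the tree after changes like these, a minimal sketch using the pycodestyle package (an assumption; the commit itself does not mention any tooling, and the 'app' and 'tests' directory names below are placeholders for the real package layout):

# Hypothetical verification script; directory names are assumptions.
import pycodestyle

style = pycodestyle.StyleGuide()  # default PEP 8 settings
report = style.check_files(['app', 'tests'])
print("remaining violations:", report.total_errors)
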