Rewrote SQLite POC driver

This commit is contained in:
Patrick McDonagh
2016-03-23 17:20:29 -05:00
parent c3693d8e4b
commit deec5db66d

View File

@@ -15,6 +15,7 @@ import pickle
import re import re
from device_base import deviceBase from device_base import deviceBase
from datetime import datetime from datetime import datetime
import traceback
import requests import requests
try: try:
@@ -24,85 +25,109 @@ except:
import calendar import calendar
def min_max_check(val, min, max): class Channel():
if val < min: def __init__(self, mesh_name, data_type, chg_threshold, guarantee_sec):
return min self.mesh_name = mesh_name
elif val > max: self.data_type = data_type
return max self.last_value = None
else: self.value = None
return val self.last_send_time = 0
self.chg_threshold = chg_threshold
self.guarantee_sec = guarantee_sec
def checkSend(self, newVal, force):
v = self.data_type(newVal)
if self.data_type == bool or self.data_type == str:
if (self.last_send_time == 0) or (self.value is None) or not (self.value == v) or ((self.guarantee_sec > 0) and ((time.time() - self.last_send_time) > self.guarantee_sec)) or (force):
self.last_value = self.value
self.value = v
self.last_send_time = time.time()
return True
else:
return False
else:
if (self.last_send_time == 0) or (self.value is None) or (abs(self.value - v) > self.chg_threshold) or ((self.guarantee_sec > 0) and ((time.time() - self.last_send_time) > self.guarantee_sec)) or (force):
self.last_value = self.value
self.value = v
self.last_send_time = time.time()
return True
else:
return False
# Gauge-off (daily history) channels: device field name -> Meshify channel.
# Every entry is a float sent on any change (threshold 0.0); kWh additionally
# carries a one-hour (3600 s) guaranteed resend.
go_channels = {
    'electricity_cost': Channel('go_electricity_cost', float, 0.0, 0),
    'percent_run': Channel('go_percent_run', float, 0.0, 0),
    'average_SPM': Channel('go_average_spm', float, 0.0, 0),
    'peak_load': Channel('go_peak_load', float, 0.0, 0),
    'polished_rod_HP': Channel('go_polished_rod_hp', float, 0.0, 0),
    'lifting_cost': Channel('go_lifting_cost', float, 0.0, 0),
    'full_card_production': Channel('go_full_card_production', float, 0.0, 0),
    'fluid_above_pump': Channel('go_fluid_above_pump', float, 0.0, 0),
    'production_calculated': Channel('go_production_calculated', float, 0.0, 0),
    'kWh': Channel('go_kwh', float, 0.0, 3600),
    'inflow_rate': Channel('go_inflow_rate', float, 0.0, 0),
    'kWh_regen': Channel('go_kwh_regen', float, 0.0, 0),
    'pump_intake_pressure': Channel('go_pump_intake_pressure', float, 0.0, 0),
    'min_load': Channel('go_min_load', float, 0.0, 0),
}
# Card bookkeeping channels: the card id and type are sent on change, while
# the full card payload carries a 10-minute (600 s) guaranteed resend.
card_channels = {
    "card_id": Channel("card_history", int, 0, 0),
    "card_type": Channel("cardtype", str, 0, 0),
    "card": Channel(None, str, 0, 600),
}

# Pump status channel (string, sent whenever it changes).
status = Channel('status', str, 0, 0)
"status": {"last_value": "", "data_type": "str", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": 0},
"card_history": {"last_value": "", "data_type": "str", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": 0},
"well_name": {"last_value": "", "data_type": "str", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": stroke_data_min_upload_time},
"tubing_head_pressure": {"last_value": "", "data_type": "float", "change_amount": 5, "last_time_uploaded": 0, "min_time_between_uploads": stroke_data_min_upload_time},
"fluid_gradient": {"last_value": "", "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": stroke_data_min_upload_time},
"stuffing_box_friction": {"last_value": "", "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": stroke_data_min_upload_time},
"dt": {"last_value": "", "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": stroke_data_min_upload_time},
"downhole_gross_stroke": {"last_value": "", "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": stroke_data_min_upload_time},
"downhole_adjusted_gross_stroke": {"last_value": "", "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": stroke_data_min_upload_time},
"downhole_net_stroke": {"last_value": "", "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": stroke_data_min_upload_time},
"downhole_fluid_load": {"last_value": "", "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": stroke_data_min_upload_time},
"surface_max_load": {"last_value": "", "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": stroke_data_min_upload_time},
"surface_min_load": {"last_value": "", "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": stroke_data_min_upload_time},
"tubing_movement": {"last_value": "", "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": stroke_data_min_upload_time},
"surface_stroke_length": {"last_value": "", "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": stroke_data_min_upload_time},
"fillage_percent": {"last_value": "", "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": stroke_data_min_upload_time},
"polished_rod_hp": {"last_value": "", "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": stroke_data_min_upload_time},
"pump_hp": {"last_value": "", "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": stroke_data_min_upload_time},
"SPM": {"last_value": "", "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": stroke_data_min_upload_time},
"fluid_above_pump": {"last_value": "", "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": stroke_data_min_upload_time},
"pump_intake_pressure": {"last_value": "", "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": stroke_data_min_upload_time},
"stroke_production": {"last_value": "", "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": stroke_data_min_upload_time},
"drive_torque_mode": {"last_value": "", "data_type": "int", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": stroke_data_min_upload_time},
"torque_reference": {"last_value": "", "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": stroke_data_min_upload_time},
"speed_reference": {"last_value": "", "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": stroke_data_min_upload_time},
"downhole_min_position": {"last_value": "", "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": stroke_data_min_upload_time},
"downhole_max_position": {"last_value": "", "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": stroke_data_min_upload_time},
}
# Per-stroke tag channels: device tag name -> Meshify channel.  Each has a
# change threshold tuned to the quantity's magnitude and a one-hour (3600 s)
# guaranteed resend.
tag_channels = {
    "polished_rod_hp": Channel("polished_rod_hp", float, 1.0, 3600),
    "drive_torque_mode": Channel("drive_torque_mode", int, 1.0, 3600),
    "downhole_gross_stroke": Channel("downhole_gross_stroke", float, 1.0, 3600),
    "fluid_gradient": Channel("fluid_gradient", float, 0.01, 3600),
    "tubing_head_pressure": Channel("tubing_head_pressure", float, 10.0, 3600),
    "surface_min_load": Channel("surface_min_load", float, 500.0, 3600),
    "downhole_fluid_load": Channel("downhole_fluid_load", float, 500.0, 3600),
    "downhole_max_position": Channel("downhole_max_position", float, 1.0, 3600),
    "downhole_net_stroke": Channel("downhole_net_stroke", float, 1.0, 3600),
    "fillage_percent": Channel("fillage_percent", float, 5.0, 3600),
    "pump_hp": Channel("pump_hp", float, 1.0, 3600),
    # local tag name differs from the upstream channel name for these two:
    "spm": Channel("SPM", float, 0.5, 3600),
    "pump_intake_pressure": Channel("pump_intake_pressure", float, 200.0, 3600),
    "speed_reference": Channel("speed_reference", float, 50.0, 3600),
    "downhole_min_position": Channel("downhole_min_position", float, 1.0, 3600),
    "tubing_movement": Channel("tubing_movement", float, 1.0, 3600),
    "surface_max_load": Channel("surface_max_load", float, 500.0, 3600),
    "stuffing_box_friction": Channel("stuffing_box_friction", float, 50.0, 3600),
    "dt": Channel("dt", float, 0.001, 3600),
    "fluid_level": Channel("fluid_above_pump", float, 100.0, 3600),
    "torque_reference": Channel("torque_reference", float, 5.0, 3600),
    "surface_stroke_length": Channel("surface_stroke_length", float, 1.0, 3600),
    "downhole_adjusted_gross_stroke": Channel("downhole_adjusted_gross_stroke", float, 1.0, 3600),
    "stroke_production": Channel("stroke_production", float, 0.001, 3600),
}
total_min_upload_time = 300 # seconds
# Current daily totals: device total name -> Meshify channel.  Every total is
# a float with a one-hour (3600 s) guaranteed resend; change thresholds vary
# with the magnitude of the quantity.
dt_channels = {
    'Electricity_Cost': Channel('dt_electricity_cost', float, 1.0, 3600),
    'Downhole_Net_Stroke': Channel('dt_downhole_net_stroke', float, 2.0, 3600),
    'Tubing_Movement': Channel('dt_tubing_movement', float, 1.0, 3600),
    'Average_SPM': Channel('dt_average_spm', float, 0.50, 3600),
    'Peak_Load': Channel('dt_peak_load', float, 500.0, 3600),
    'kWh': Channel('dt_kWh', float, 5.0, 3600),
    'Pump_HP': Channel('dt_pump_hp', float, 0.5, 3600),
    'Percent_Run': Channel('dt_percent_run', float, 5.0, 3600),
    'Projected_Production': Channel('dt_projected_production', float, 5.0, 3600),
    # NOTE(review): 'Pump_Intake_Presure' (sic) presumably matches the
    # misspelled field name in the device's /json/totals feed -- confirm
    # against the device before "fixing" the spelling.
    'Pump_Intake_Presure': Channel('dt_pump_intake_pressure', float, 100.0, 3600),
    'Inflow_Rate': Channel('dt_inflow_rate', float, 1.0, 3600),
    'Calculated_Production': Channel('dt_calculated_production', float, 5.0, 3600),
    'Fluid_Level': Channel('dt_fluid_level', float, 100.0, 3600),
    'Lifting_Cost': Channel('dt_lifting_cost', float, 1.0, 3600),
    'Polished_Rod_HP': Channel('dt_polished_rod_hp', float, 1.0, 3600),
    'kWh_Regen': Channel('dt_kWh_regen', float, 1.0, 3600),
    'Surface_Stroke_Length': Channel('dt_surface_stroke_length', float, 1.0, 3600),
    'Full_Card_Production': Channel('dt_full_card_production', float, 10.0, 3600),
    'Min_Load': Channel('dt_min_load', float, 500.0, 3600),
}
@@ -188,6 +213,12 @@ class start(threading.Thread, deviceBase):
checkBackupSkipped = 1 checkBackupSkipped = 1
while True: while True:
try: try:
if self.forceSend:
print("!!!!!!!!!!!!!!! FORCE SEND !!!!!!!!!!!!!!!")
runLoopStatus = "checkStatus"
self.checkStatus()
runLoopStatus = "checkEvents" runLoopStatus = "checkEvents"
self.checkEvents() self.checkEvents()
@@ -200,24 +231,26 @@ class start(threading.Thread, deviceBase):
runLoopStatus = "checkFluidShots" runLoopStatus = "checkFluidShots"
self.checkFluidShots() self.checkFluidShots()
runLoopStatus = "checkStatus"
self.checkStatus()
runLoopStatus = "checkDailyTotals" runLoopStatus = "checkDailyTotals"
self.checkDailyTotals() self.checkDailyTotals()
runLoopStatus = "checkGaugeOffData" runLoopStatus = "checkGaugeOffData"
self.checkGaugeOffData() self.checkGaugeOffData()
runLoopStatus = "checkStoredValues"
self.checkStoredValues(self.forceSend)
runLoopStatus = "getDataLoggerStatus()" runLoopStatus = "getDataLoggerStatus()"
self.getDataLoggerStatus() self.getDataLoggerStatus()
if self.statusChanged: # if self.statusChanged:
runLoopStatus = "getLatestXCards" # runLoopStatus = "getLatestXCards"
self.getLatestXCards(5) # self.getLatestXCards(5)
else: # else:
runLoopStatus = "checkLatestCard" # runLoopStatus = "checkLatestCard"
self.checkLatestCard() # self.checkLatestCard()
runLoopStatus = "checkLatestCard"
self.checkLatestCard()
if self.forceSend or (checkBackupSkipped > checkBackupEvery): if self.forceSend or (checkBackupSkipped > checkBackupEvery):
runLoopStatus = "checkBackup" runLoopStatus = "checkBackup"
@@ -231,6 +264,7 @@ class start(threading.Thread, deviceBase):
except Exception, e: except Exception, e:
sleep_timer = 20 sleep_timer = 20
print "Error during {0} of run loop: {1}\nWill try again in {2} seconds...".format(runLoopStatus, e, sleep_timer) print "Error during {0} of run loop: {1}\nWill try again in {2} seconds...".format(runLoopStatus, e, sleep_timer)
traceback.print_exc()
time.sleep(sleep_timer) time.sleep(sleep_timer)
def checkBackup(self): def checkBackup(self):
@@ -272,7 +306,36 @@ class start(threading.Thread, deviceBase):
with open('noteIDs.p', 'wb') as handle: with open('noteIDs.p', 'wb') as handle:
pickle.dump(self.noteIDs, handle) pickle.dump(self.noteIDs, handle)
def checkFluidShots(self):
    """Poll the driver for fluid shots and forward any not yet reported.

    Remembers up to the 50 most recent shot ids in self.fluidshotIDs and
    persists that list to fluidshotIDs.p so a restart does not re-send.
    Improvement: the list is trimmed and pickled once per poll instead of
    re-serializing the whole list for every individual new shot.
    """
    data = json.loads(requests.get(self.device_address + "/json/fluid_shot/get").text)
    fluid_shots = data["fluid_shots"]
    found_new = False
    for shot in fluid_shots:
        shot_id = int(shot["id"])
        if shot_id not in self.fluidshotIDs:
            # shot_datetime is interpreted as UTC (calendar.timegm) -> epoch secs.
            timestamp = calendar.timegm(time.strptime(shot["shot_datetime"], '%Y-%m-%d %H:%M:%S'))
            self.sendtodbJSON("fluidshots", json.dumps(shot), timestamp)
            self.fluidshotIDs.append(shot_id)
            found_new = True
    if found_new:
        # Keep only the 50 most recent ids, then persist once per poll.
        del self.fluidshotIDs[:-50]
        with open('fluidshotIDs.p', 'wb') as handle:
            pickle.dump(self.fluidshotIDs, handle)
def checkWellTests(self):
    """Poll the driver for well tests and forward any not yet reported.

    Remembers up to the 50 most recent test ids in self.welltestIDs and
    persists that list to welltestIDs.p so a restart does not re-send.
    Improvement: the list is trimmed and pickled once per poll instead of
    re-serializing the whole list for every individual new test.
    """
    data = json.loads(requests.get(self.device_address + "/json/well_test/get").text)
    well_tests = data["well_tests"]
    found_new = False
    for test in well_tests:
        test_id = int(test["id"])
        if test_id not in self.welltestIDs:
            # test_date is interpreted as UTC (calendar.timegm) -> epoch secs.
            timestamp = calendar.timegm(time.strptime(test["test_date"], '%Y-%m-%d %H:%M:%S'))
            self.sendtodbJSON("welltests", json.dumps(test), timestamp)
            self.welltestIDs.append(test_id)
            found_new = True
    if found_new:
        # Keep only the 50 most recent ids, then persist once per poll.
        del self.welltestIDs[:-50]
        with open('welltestIDs.p', 'wb') as handle:
            pickle.dump(self.welltestIDs, handle)
def checkStatus(self): def checkStatus(self):
global status
statusMap = { statusMap = {
0: 'Stopped', 0: 'Stopped',
1: 'Running', 1: 'Running',
@@ -288,47 +351,40 @@ class start(threading.Thread, deviceBase):
if st_response.status_code == 200: if st_response.status_code == 200:
data = json.loads(st_response.text) data = json.loads(st_response.text)
date = data["ISOdate"] date = data["ISOdate"]
status = statusMap[int(data["status"])] status_read = statusMap[int(data["status"])]
if channels["status"]["last_value"] != status: if status.last_value != status_read:
self.statusChanged = True self.statusChanged = True
print "Status has changed from {0} to {1} @ {2}".format(channels["status"]["last_value"], status, time.time()) print "Status has changed from {0} to {1} @ {2}".format(status.last_value, status_read, time.time())
else: else:
self.statusChanged = False self.statusChanged = False
if self.statusChanged or self.forceSend: if self.statusChanged or self.forceSend:
self.status = status self.status = status_read
reg = "(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2}).(\d{3})Z" reg = "(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2}).(\d{3})Z"
fd = re.search(reg, date) fd = re.search(reg, date)
dt = datetime(int(fd.group(1)), int(fd.group(2)), int(fd.group(3)), int(fd.group(4)), int(fd.group(5)), int(fd.group(6)), int(fd.group(7))) dt = datetime(int(fd.group(1)), int(fd.group(2)), int(fd.group(3)), int(fd.group(4)), int(fd.group(5)), int(fd.group(6)), int(fd.group(7)))
# timestamp = int(time.mktime(time.strptime(date, '%Y-%m-%dT%H:%M:%S.%fZ'))) # timestamp = int(time.mktime(time.strptime(date, '%Y-%m-%dT%H:%M:%S.%fZ')))
timestamp = calendar.timegm(dt.timetuple()) timestamp = calendar.timegm(dt.timetuple())
self.sendtodb("status", status, timestamp) self.sendtodb("status", status_read, timestamp)
channels["status"]["last_value"] = status status.last_value = status_read
def checkDailyTotals(self):
    """Poll /json/totals and forward each total that passes its channel's
    change/guarantee check (timestamp 0 means "now" upstream)."""
    data = json.loads(requests.get(self.device_address + "/json/totals").text)
    if data['status'] == "OK":
        timestamp = 0
        for val in data["totals"]:
            channel = dt_channels.get(val['name'])
            if channel is not None and channel.checkSend(val['value'], False):
                self.sendtodb(channel.mesh_name, channel.value, timestamp)
    else:
        # BUG FIX: `data` is a dict from json.loads, so the previous
        # `data.message` raised AttributeError in the error branch; read the
        # key instead (falling back to the whole payload if it is absent).
        print("checkDailyTotalsError: {0}".format(data.get('message', data)))
def checkGaugeOffData(self): def checkGaugeOffData(self):
data = json.loads(requests.get(self.device_address + "/json/history").text) data = json.loads(requests.get(self.device_address + "/json/history").text)
day = data["hist"] day = data["hist"]
date = day['gauge_date'] date = day['gauge_date']
# print day["gauge_date"]
# timestamp = time.mktime(time.strptime(day["gauge_date"], '%Y-%m-%dT%H:%M:%S.%fZ'))
reg = "(\d{4})-(\d{2})-(\d{2}) (\d{2}):(\d{2}):(\d{2})" reg = "(\d{4})-(\d{2})-(\d{2}) (\d{2}):(\d{2}):(\d{2})"
fd = re.search(reg, date) fd = re.search(reg, date)
@@ -336,31 +392,19 @@ class start(threading.Thread, deviceBase):
# timestamp = int(time.mktime(time.strptime(date, '%Y-%m-%dT%H:%M:%S.%fZ'))) # timestamp = int(time.mktime(time.strptime(date, '%Y-%m-%dT%H:%M:%S.%fZ')))
timestamp = calendar.timegm(dt.timetuple()) timestamp = calendar.timegm(dt.timetuple())
for entry in day: for entry in day:
# if go_channels.has_key(entry):
if entry in go_channels: if entry in go_channels:
# "percent_run": {"meshifyName": "go_percent_run", "last_value": "", "last_send_time": 0, "data_type": "float", "change_amount": 0}, if go_channels[entry].checkSend(day[entry], False):
if go_channels[entry]["last_value"] != day[entry]: self.sendtodb(go_channels[entry].mesh_name, day[entry], timestamp)
print entry, day[entry]
print go_channels[entry]["meshifyName"], day[entry], timestamp
self.sendtodb(go_channels[entry]["meshifyName"], day[entry], timestamp)
go_channels[entry]["last_value"] = day[entry]
def checkLatestCard(self): def checkLatestCard(self):
latest = requests.get(self.device_address + "/json/latest") latest = json.loads(requests.get(self.device_address + "/json/latestcard").text)
latest = json.loads(latest.text)
folder = str(latest["folder"])
file = latest["file"].replace(".csv", "")
# check the card to see if its new # check the card to see if its new
# 1. if its new send the folder/file_name to the card_history channel # 1. if its new send the folder/file_name to the card_history channel
# 2. if its new and its been 10 minutes since you last sent an entire card, then send up all of the data # 2. if its new and its been 10 minutes since you last sent an entire card, then send up all of the data
if channels["card_history"]["last_value"] != (folder + "/" + file): if card_channels['card_id'].checkSend(latest['card_data']['Card_ID'], self.forceSend):
# we have a new card dateTime = str(latest["card_data"]["Stroke_Time"])
# get the data for this event
data = json.loads(requests.get(self.device_address + "/json/" + folder + "/" + file).text)
dateTime = str(data["card_data"]["Stroke_Time"])
# timestamp = time.mktime(time.strptime(dateTime, '%Y-%m-%dT%H:%M:%S.%fZ'))
reg = "(\d{4})-(\d{2})-(\d{2}) (\d{2}):(\d{2}):(\d{2})" reg = "(\d{4})-(\d{2})-(\d{2}) (\d{2}):(\d{2}):(\d{2})"
fd = re.search(reg, dateTime) fd = re.search(reg, dateTime)
@@ -370,18 +414,17 @@ class start(threading.Thread, deviceBase):
print "New card detected @ {0}".format(datetime.strftime(datetime.fromtimestamp(timestamp), "%Y-%m-%d %H:%M:%S.%f")) print "New card detected @ {0}".format(datetime.strftime(datetime.fromtimestamp(timestamp), "%Y-%m-%d %H:%M:%S.%f"))
# set the last value = to current value and upload your data # set the last value = to current value and upload your data
channels["card_history"]["last_value"] = (folder + "/" + file) self.sendtodb("card_history", card_channels['card_id'].value, timestamp)
self.sendtodb("card_history", (folder + "/" + file), timestamp)
# check the last time the card was updated # check the last time the card was updated
if (time.time() - int(channels["card_history"]["last_time_uploaded"])) > self.cardLoopTimer or self.statusChanged or self.forceSend: if (time.time() - card_channels['card'].last_send_time) > self.cardLoopTimer or self.statusChanged or self.forceSend:
# its been 10 minutes, send the full upload # its been 10 minutes, send the full upload
print "Either status has changed or last stored card is too old." print "Either status has changed or last stored card is too old."
channels["card_history"]["last_time_uploaded"] = time.time() card_channels["card"].last_send_time = time.time()
self.process_card(data, timestamp, card_timestamp, sendCards=True) self.process_card(latest, timestamp, card_timestamp, sendCards=True)
return return
else: else:
self.process_card(data, timestamp, card_timestamp, sendCards=False) self.process_card(latest, timestamp, card_timestamp, sendCards=False)
def process_card(self, data, data_timestamp, card_timestamp, sendCards=False): def process_card(self, data, data_timestamp, card_timestamp, sendCards=False):
@@ -392,39 +435,13 @@ class start(threading.Thread, deviceBase):
# NOTE: the initial vaue of "" is given to all channels in the channels object, # NOTE: the initial vaue of "" is given to all channels in the channels object,
# so to avoid comparing a string to a float, and to make sure on startup we send all of the values, the first time through we send everything that has a "" as its last value # so to avoid comparing a string to a float, and to make sure on startup we send all of the values, the first time through we send everything that has a "" as its last value
# We don't want to store any data on starting, just the cards
if self.status != 'Starting':
for channel in data["card_data"]:
# if channels.has_key(channel):
if channel in channels:
if channels[channel]["data_type"] == "str":
if (data["card_data"][channel] != channels[channel]["last_value"] and ((time.time() - int(channels[channel]["last_time_uploaded"])) > int(channels[channel]["min_time_between_uploads"]))) or sendCards:
print "new value for: ", channel
print data["card_data"][channel]
self.sendtodb(channel, str(data["card_data"][channel]), int(data_timestamp))
channels[channel]["last_value"] = data["card_data"][channel]
channels[channel]["last_time_uploaded"] = time.time()
if channels[channel]["data_type"] == "float" or channels[channel]["data_type"] == "int":
if channels[channel]["last_value"] == "":
# print "first time getting data"
print "new value for: ", channel
print data["card_data"][channel]
self.sendtodb(channel, str(data["card_data"][channel]), int(data_timestamp))
channels[channel]["last_value"] = data["card_data"][channel]
channels[channel]["last_time_uploaded"] = time.time()
if (abs(float(data["card_data"][channel]) - float(channels[channel]["last_value"])) > channels[channel]["change_amount"] and ((time.time() - int(channels[channel]["last_time_uploaded"])) > int(channels[channel]["min_time_between_uploads"]))) or sendCards:
# print "first time getting data"
print "new value for: ", channel
print data["card_data"][channel]
self.sendtodb(channel, str(data["card_data"][channel]), int(data_timestamp))
channels[channel]["last_value"] = data["card_data"][channel]
channels[channel]["last_time_uploaded"] = time.time()
if sendCards: if sendCards:
s_p = data["card_data"]["Surface_Position"] self.sendtodb("cardtype", str(data['card_data']['Card_Type']), int(data_timestamp))
s_l = data["card_data"]["Surface_Load"]
d_p = data["card_data"]["Downhole_Position"] s_p = data['card_data']["Surface_Position"]
d_l = data["card_data"]["Downhole_Load"] s_l = data['card_data']["Surface_Load"]
d_p = data['card_data']["Downhole_Position"]
d_l = data['card_data']["Downhole_Load"]
newSc = "[" newSc = "["
newDc = "[" newDc = "["
@@ -453,6 +470,15 @@ class start(threading.Thread, deviceBase):
self.sendtodb("sc", newSc, card_timestamp) self.sendtodb("sc", newSc, card_timestamp)
self.sendtodb("dc", newDc, card_timestamp) self.sendtodb("dc", newDc, card_timestamp)
def checkStoredValues(self, forceSend):
    """Poll /json/tagvalues and forward each tag that passes its channel's
    change/guarantee check.

    forceSend: when True, every matched tag is sent regardless of thresholds.
    """
    data = json.loads(requests.get(self.device_address + "/json/tagvalues").text)
    if data['status'] == "OK":
        for val in data['vals']:
            channel = tag_channels.get(val['name'])
            if channel is not None and channel.checkSend(val['val'], forceSend):
                self.sendtodbJSON(channel.mesh_name, channel.value, 0)
    else:
        # Consistent with checkDailyTotals: surface a failed poll instead of
        # silently ignoring it.
        print("checkStoredValuesError: {0}".format(data.get('message', data)))
def getLatestXCards(self, numCards): def getLatestXCards(self, numCards):
data = json.loads(requests.get(self.device_address + "/json/latest/" + str(int(numCards))).text) data = json.loads(requests.get(self.device_address + "/json/latest/" + str(int(numCards))).text)
for card in data['cards']: for card in data['cards']: