diff --git a/POCloud/HTML Templates/Gauge Off.html b/POCloud/HTML Templates/Gauge Off.html
new file mode 100644
index 0000000..3bbb341
--- /dev/null
+++ b/POCloud/HTML Templates/Gauge Off.html
@@ -0,0 +1,159 @@
+
+
+
" aria-valuemin="0" aria-valuemax="100" style="width: <%= channels["poc.surface_stroke_length"].value %>%;">
+ Surface Stroke Length: <%= channels["poc.surface_stroke_length"].value %> in.
+
+
+
+
+
" aria-valuemin="0" aria-valuemax="100" style="width: <%= channels["poc.downhole_gross_stroke"].value %>%;">
+ Downhole Gross Stroke: <%= channels["poc.downhole_gross_stroke"].value %> in.
+
+
+
+
+
" aria-valuemin="0" aria-valuemax="100" style="width: <%= channels["poc.downhole_adjusted_gross_stroke"].value %>%;">
+ Downhole Adjusted Gross Stroke: <%= channels["poc.downhole_adjusted_gross_stroke"].value %> in.
+
+
+
+
+
" aria-valuemin="0" aria-valuemax="100" style="width: <%= channels["poc.downhole_net_stroke"].value %>%;">
+ Downhole Net Stroke: <%= channels["poc.downhole_net_stroke"].value %> in.
+
+
+
+
+
+
+
+ | Measurement | Value |
+
+
+
+ | Fluid Gradient | <%= channels["poc.fluid_gradient"].value %> lbs/ft |
+ | dt | <%= channels["poc.dt"].value %> sec |
+ | Tubing Head Pressure | <%= channels["poc.tubing_head_pressure"].value %> PSI |
+ | Stuffing Box Friction | <%= channels["poc.stuffing_box_friction"].value %> lbs. |
+
+ | Surface Stroke Length | <%= channels["poc.surface_stroke_length"].value %> in. |
+ | Surface Min. Load | <%= channels["poc.surface_min_load"].value %> lbs. |
+ | Surface Max. Load | <%= channels["poc.surface_max_load"].value %> lbs. |
+
+ | Downhole Gross Stroke | <%= channels["poc.downhole_gross_stroke"].value %> in. |
+ | Tubing Movement | <%= channels["poc.tubing_movement"].value %> in. |
+ | Downhole Adjusted Gross Stroke | <%= channels["poc.downhole_adjusted_gross_stroke"].value %> in. |
+ | Downhole Net Stroke | <%= channels["poc.downhole_net_stroke"].value %> in. |
+ | Downhole Fluid Load | <%= channels["poc.downhole_fluid_load"].value %> lbs. |
+
+ | Polished Rod HP | <%= channels["poc.polished_rod_hp"].value %> HP |
+ | Pump HP | <%= channels["poc.pump_hp"].value %> HP |
+ | Fluid Level | <%= channels["poc.fluid_above_pump"].value %> ft. |
+
+
+
+
+
diff --git a/POCloud/HTML Templates/setup.html b/POCloud/HTML Templates/setup.html
new file mode 100644
index 0000000..454be63
--- /dev/null
+++ b/POCloud/HTML Templates/setup.html
@@ -0,0 +1,311 @@
+
+
+
+
Rod String & Pump
+
+
+
+
+
+
+
+
+
+
+
+
+ | Measurement | Value |
+
+
+
+ | Length | ft. |
+ | Diameter | in. |
+ | Material | |
+ | Damping Factor | |
+
+
+
+
+
+
+
+
+
Motor Data
+
+
+ | Measurement | Value |
+
+
+ | Volts | V |
+ | Hertz | Hz |
+ | Poles | |
+ | Amps | A |
+ | Horsepower | HP |
+ | Service Factor | |
+ | RPM | RPM |
+ | Motor Sheave | in. |
+ | Gearbox Rating | x 1000 in-lbs |
+ | Gearbox Ratio | |
+ | Gearbox Limit | |
+ | Gearbox Sheave | in. |
+ | Max Frequency | Hz |
+ | Min RPM | RPM |
+ | Max RPM | RPM |
+
+
+
+
+
+
+
Well Parameters
+
+
+ | Measurement | Value |
+
+
+ | API Gravity Oil | |
+ | Specific Gravity Water | |
+ | Young's Modulus (Steel) | x 10^6 |
+ | Young's Modulus (Fiberglass) | x 10^6 |
+ | Water Cut | % |
+ | Casing ID | in. |
+ | Tubing OD | in. |
+ | Tubing ID | in. |
+ | Tubing Anchor Depth | ft. |
+ | Pump Diameter | in. |
+ | Pump Constant | |
+ | Structural Rating | x 100 lbs. |
+ | Motor Control Mode | |
+ | Total Vertical Depth | ft. |
+ | Well Type | |
+ | Surface Stroke Length | in. |
+
+
+
+
+
+
+
+
+
diff --git a/POCloud/w_csv/poc.py b/POCloud/w_csv/poc.py
new file mode 100644
index 0000000..e2939b9
--- /dev/null
+++ b/POCloud/w_csv/poc.py
@@ -0,0 +1,467 @@
+#!/usr/bin/python
+
+import types
+import traceback
+import binascii
+import threading
+import time
+import thread
+import os
+import struct
+import sys
+import serial
+import minimalmodbus
+import pickle
+import re
+from device_base import deviceBase
+from datetime import datetime
+
+import requests
+try:
+ import json
+except:
+ import simplejson as json
+import calendar
+
+
def min_max_check(val, min, max):
    """Clamp ``val`` to the inclusive range [min, max].

    NOTE: the parameter names shadow the ``min``/``max`` builtins; they are
    kept unchanged so existing (possibly keyword) callers keep working.
    """
    if val > max:
        return max
    if val < min:
        return min
    return val
+
+
# Gauge-off history channels: maps a field name in the device's daily history
# record to the meshify channel it is forwarded on.  A value is re-sent only
# when it differs from "last_value" (see checkGaugeOffData).
go_channels = {
    "percent_run": {"meshifyName": "go_percent_run", "last_value": "", "last_send_time": 0, "data_type": "float", "change_amount": 0},
    # BUG FIX: data_type was " float" (leading space) -- normalized to "float"
    # so any data_type comparison treats kWh like every other float channel.
    "kWh": {"meshifyName": "go_kwh", "last_value": "", "last_send_time": 0, "data_type": "float", "change_amount": 0},
    "electricity_cost": {"meshifyName": "go_electricity_cost", "last_value": "", "last_send_time": 0, "data_type": "float", "change_amount": 0},
    "peak_load": {"meshifyName": "go_peak_load", "last_value": "", "last_send_time": 0, "data_type": "float", "change_amount": 0},
    "min_load": {"meshifyName": "go_min_load", "last_value": "", "last_send_time": 0, "data_type": "float", "change_amount": 0},
    "average_SPM": {"meshifyName": "go_average_spm", "last_value": "", "last_send_time": 0, "data_type": "float", "change_amount": 0},
    "production_calculated": {"meshifyName": "go_production_calculated", "last_value": "", "last_send_time": 0, "data_type": "float", "change_amount": 0},
    "full_card_production": {"meshifyName": "go_full_card_production", "last_value": "", "last_send_time": 0, "data_type": "float", "change_amount": 0},
    "polished_rod_HP": {"meshifyName": "go_polished_rod_hp", "last_value": "", "last_send_time": 0, "data_type": "float", "change_amount": 0},
    "lifting_cost": {"meshifyName": "go_lifting_cost", "last_value": "", "last_send_time": 0, "data_type": "float", "change_amount": 0},
    "fluid_above_pump": {"meshifyName": "go_fluid_above_pump", "last_value": "", "last_send_time": 0, "data_type": "float", "change_amount": 0},
    "pump_intake_pressure": {"meshifyName": "go_pump_intake_pressure", "last_value": "", "last_send_time": 0, "data_type": "float", "change_amount": 0},
    "kWh_regen": {"meshifyName": "go_kwh_regen", "last_value": "", "last_send_time": 0, "data_type": "float", "change_amount": 0},
    "inflow_rate": {"meshifyName": "go_inflow_rate", "last_value": "", "last_send_time": 0, "data_type": "float", "change_amount": 0},
    }
+
stroke_data_min_upload_time = 300  # seconds; shared per-channel upload throttle

# Per-card scalar channels.  Most are floats sharing the same throttle and a
# zero deadband, so those entries are generated from one name list; the
# handful of non-uniform channels are filled in explicitly afterwards.
_float_stroke_channel_names = (
    "fluid_gradient", "stuffing_box_friction", "dt", "downhole_gross_stroke",
    "downhole_adjusted_gross_stroke", "downhole_net_stroke",
    "downhole_fluid_load", "surface_max_load", "surface_min_load",
    "tubing_movement", "surface_stroke_length", "fillage_percent",
    "polished_rod_hp", "pump_hp", "SPM", "fluid_above_pump",
    "pump_intake_pressure", "stroke_production", "torque_reference",
    "speed_reference", "downhole_min_position", "downhole_max_position",
)

channels = {}
for _name in _float_stroke_channel_names:
    channels[_name] = {"last_value": "", "data_type": "float", "change_amount": 0,
                       "last_time_uploaded": 0,
                       "min_time_between_uploads": stroke_data_min_upload_time}

# Non-uniform channels: strings, the throttle-free status/card feeds, the one
# channel with a deadband, and the lone int channel.
channels["status"] = {"last_value": "", "data_type": "str", "change_amount": 0,
                      "last_time_uploaded": 0, "min_time_between_uploads": 0}
channels["card_history"] = {"last_value": "", "data_type": "str", "change_amount": 0,
                            "last_time_uploaded": 0, "min_time_between_uploads": 0}
channels["well_name"] = {"last_value": "", "data_type": "str", "change_amount": 0,
                         "last_time_uploaded": 0,
                         "min_time_between_uploads": stroke_data_min_upload_time}
channels["tubing_head_pressure"] = {"last_value": "", "data_type": "float",
                                    "change_amount": 5,  # only channel with a deadband
                                    "last_time_uploaded": 0,
                                    "min_time_between_uploads": stroke_data_min_upload_time}
channels["drive_torque_mode"] = {"last_value": "", "data_type": "int", "change_amount": 0,
                                 "last_time_uploaded": 0,
                                 "min_time_between_uploads": stroke_data_min_upload_time}
+
+
total_min_upload_time = 300  # seconds; shared throttle for daily totals

# Current Daily Totals: device-reported total name -> meshify channel name.
# All entries share identical bookkeeping fields, so the table is generated
# from this name map.
# NOTE(review): "Pump_Intake_Presure" is misspelled, but the key must match
# the name the device reports exactly -- confirm against the device before
# "fixing" it.
_dt_channel_map = {
    "Average_SPM": "dt_average_spm",
    "Downhole_Net_Stroke": "dt_downhole_net_stroke",
    "Electricity_Cost": "dt_electricity_cost",
    "Fluid_Level": "dt_fluid_level",
    "Full_Card_Production": "dt_full_card_production",
    "Inflow_Rate": "dt_inflow_rate",
    "kWh": "dt_kWh",
    "kWh_Regen": "dt_kWh_regen",
    "Lifting_Cost": "dt_lifting_cost",
    "Peak_Load": "dt_peak_load",
    "Min_Load": "dt_min_load",
    "Percent_Run": "dt_percent_run",
    "Polished_Rod_HP": "dt_polished_rod_hp",
    "Calculated_Production": "dt_calculated_production",
    "Projected_Production": "dt_projected_production",
    "Pump_HP": "dt_pump_hp",
    "Pump_Intake_Presure": "dt_pump_intake_pressure",
    "Surface_Stroke_Length": "dt_surface_stroke_length",
    "Tubing_Movement": "dt_tubing_movement",
}

dt_channels = {}
for _total_name, _mesh_name in _dt_channel_map.items():
    dt_channels[_total_name] = {"meshify_channel": _mesh_name, "last_value": 0,
                                "data_type": "float", "change_amount": 0,
                                "last_time_uploaded": 0,
                                "min_time_between_uploads": total_min_upload_time}
+
+
+class start(threading.Thread, deviceBase):
+ def __init__(self, name=None, number=None, mac=None, Q=None, mcu=None, companyId=None, offset=None, mqtt=None, Nodes=None):
+ threading.Thread.__init__(self)
+ deviceBase.__init__(self, name=name, number=number, mac=mac, Q=Q, mcu=mcu, companyId=companyId, offset=offset, mqtt=mqtt, Nodes=Nodes)
+
+ self.daemon = True
+ self.forceSend = True
+ self.version = "2"
+ self.device_address = "http://192.168.1.30/"
+ self.cardLoopTimer = 600
+ self.finished = threading.Event()
+ threading.Thread.start(self)
+ self.statusChanged = False
+ self.al_status_last = False
+ self.dl_status_last = False
+
+ # load stored event ID's
+ try:
+ with open('eventIds.p', 'rb') as handle:
+ self.eventIds = pickle.load(handle)
+
+ print "found pickled eventID dictionary: {0}".format(self.eventIds)
+ except:
+ print "couldn't load enent ID's from pickle"
+ self.eventIds = []
+
+ # load stored wellconfig's
+ try:
+ with open('wellSetup.p', 'rb') as handle:
+ self.wellSetup = pickle.load(handle)
+
+ print "Found pickled Well Setup (but it's going to be too long to print)"
+ # print self.wellConfig
+ except:
+ print "couldn't load Well Setup from pickle"
+ self.wellSetup = []
+
+ self.sendtodbJSON("device_address", self.device_address, 0)
+
+ # this is a required function for all drivers, its goal is to upload some piece of data
+ # about your device so it can be seen on the web
+ def register(self):
+ channels["status"]["last_value"] = ""
+
+ def run(self):
+ self.runLoopStatus = ""
+ checkBackupEvery = 100
+ checkBackupSkipped = 1
+ while True:
+ try:
+ runLoopStatus = "checkEvents"
+ self.checkEvents()
+
+ runLoopStatus = "checkStatus"
+ self.checkStatus()
+
+ runLoopStatus = "checkDailyTotals"
+ self.checkDailyTotals()
+
+ runLoopStatus = "checkGaugeOffData"
+ self.checkGaugeOffData()
+
+ runLoopStatus = "getDataLoggerStatus()"
+ self.getDataLoggerStatus()
+
+ if self.statusChanged:
+ runLoopStatus = "getLatestXCards"
+ self.getLatestXCards(5)
+ else:
+ runLoopStatus = "checkLatestCard"
+ self.checkLatestCard()
+
+ if self.forceSend or (checkBackupSkipped > checkBackupEvery):
+ runLoopStatus = "checkBackup"
+ self.checkBackup()
+ checkBackupSkipped = 0
+ checkBackupSkipped = checkBackupSkipped + 1
+
+ runLoopStatus = "Complete"
+ time.sleep(3)
+ self.forceSend = False
+ except Exception, e:
+ sleep_timer = 20
+ print "Error during {0} of run loop: {1}\nWill try again in {2} seconds...".format(runLoopStatus, e, sleep_timer)
+ time.sleep(sleep_timer)
+
+ def checkBackup(self):
+ backupList = json.loads(requests.get(self.device_address + "/json/backups").text)
+ file = backupList["backups"][0]
+ data = json.loads(requests.get(self.device_address + "/json/backups/" + file).text)
+ timestamp = time.time()
+ if data != self.wellSetup or self.forceSend:
+ self.sendtodbJSON("well_setup", json.dumps(data), timestamp)
+ self.wellSetup = data
+ with open('wellSetup.p', 'wb') as handle:
+ pickle.dump(self.wellSetup, handle)
+
+ def checkEvents(self):
+ data = json.loads(requests.get(self.device_address + "/json/event_list").text)
+ events = data["events"]
+ for event in events:
+ if int(event["id"]) not in self.eventIds:
+ # timestamp = calendar.timegm(time.strptime(event["datetime"], '%Y-%m-%dT%H:%M:%S.%fZ'))
+ date = event['datetime']
+ reg = "(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2}).(\d*)Z"
+ fd = re.search(reg, date)
+ dt = datetime(int(fd.group(1)), int(fd.group(2)), int(fd.group(3)), int(fd.group(4)), int(fd.group(5)), int(fd.group(6)), int(fd.group(7)))
+ # timestamp = int(time.mktime(time.strptime(date, '%Y-%m-%dT%H:%M:%S.%fZ')))
+ timestamp = calendar.timegm(dt.timetuple())
+
+ # we have a new event
+ self.sendtodbJSON("events", json.dumps(event), timestamp)
+ self.eventIds.append(int(event["id"]))
+ if len(self.eventIds) > 50:
+ del self.eventIds[0]
+ with open('eventIds.p', 'wb') as handle:
+ pickle.dump(self.eventIds, handle)
+
+ def checkStatus(self):
+ statusMap = {
+ 0: 'Stopped',
+ 1: 'Running',
+ 2: 'Pumped Off',
+ 3: 'Faulted',
+ 4: 'Starting',
+ 5: 'Recovering',
+ 100: 'Read Error',
+ 1000: 'PLC Error',
+ 9999: 'No Response'
+ }
+ st_response = requests.get(self.device_address + "/json/status")
+ if st_response.status_code == 200:
+ data = json.loads(st_response.text)
+ date = data["ISOdate"]
+ status = statusMap[int(data["status"])]
+
+ if channels["status"]["last_value"] != status:
+ self.statusChanged = True
+ print "Status has changed from {0} to {1} @ {2}".format(channels["status"]["last_value"], status, time.time())
+ else:
+ self.statusChanged = False
+
+ if self.statusChanged or self.forceSend:
+ self.status = status
+ # timestamp = int(time.mktime(time.strptime(date, '%Y-%m-%dT%H:%M:%S.%fZ')))
+ reg = "(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2}).(\d*)Z"
+ fd = re.search(reg, date)
+ dt = datetime(int(fd.group(1)), int(fd.group(2)), int(fd.group(3)), int(fd.group(4)), int(fd.group(5)), int(fd.group(6)), int(fd.group(7)))
+ timestamp = calendar.timegm(dt.timetuple())
+
+ self.sendtodb("status", status, timestamp)
+ channels["status"]["last_value"] = status
+ self.checkLatestCard()
+
+ def checkDailyTotals(self):
+ data = json.loads(requests.get(self.device_address + "/json/totals").text)
+ total = data["totals"]
+ if total['status'] == "success":
+ timestamp = 0
+ for val in total['values']:
+ if dt_channels.has_key(val['name']):
+ if ((time.time() - int(dt_channels[val['name']]['last_time_uploaded'])) > int(dt_channels[val['name']]['min_time_between_uploads'])):
+ if (float(val['value']) >= (float(dt_channels[val['name']]["last_value"]) + float(dt_channels[val['name']]["change_amount"]))) or (float(val['value']) <= (float(dt_channels[val['name']]["last_value"]) - float(dt_channels[val['name']]["change_amount"]))):
+ print("[dailyTotal] {0}: {1}".format(val['name'], val['value']))
+ self.sendtodb(dt_channels[val['name']]["meshify_channel"], float(val['value']), timestamp)
+ dt_channels[val['name']]["last_value"] = float(val['value'])
+ dt_channels[val['name']]["last_time_uploaded"] = time.time()
+ else:
+ print("checkDailyTotalsError: {0}".format(total.message))
+
+ def checkGaugeOffData(self):
+ data = json.loads(requests.get(self.device_address + "/json/history").text)
+ day = data["hist"]
+ date = day['gauge_date']
+ # print day["gauge_date"]
+ # timestamp = time.mktime(time.strptime(day["gauge_date"], '%Y-%m-%dT%H:%M:%S.%fZ'))
+ reg = "(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2}).(\d*)Z"
+ fd = re.search(reg, date)
+ dt = datetime(int(fd.group(1)), int(fd.group(2)), int(fd.group(3)), int(fd.group(4)), int(fd.group(5)), int(fd.group(6)), int(fd.group(7)))
+ # timestamp = int(time.mktime(time.strptime(date, '%Y-%m-%dT%H:%M:%S.%fZ')))
+ timestamp = calendar.timegm(dt.timetuple())
+
+ for entry in day:
+ if go_channels.has_key(entry):
+ # "percent_run":{"meshifyName":"go_percent_run","last_value":"","last_send_time":0,"data_type":"float","change_amount":0},
+ if go_channels[entry]["last_value"] != day[entry]:
+ print entry, day[entry]
+ print go_channels[entry]["meshifyName"], day[entry], timestamp
+ self.sendtodb(go_channels[entry]["meshifyName"], day[entry], timestamp)
+ go_channels[entry]["last_value"] = day[entry]
+
+ def checkLatestCard(self):
+ latest = requests.get(self.device_address + "/json/latest")
+ latest = json.loads(latest.text)
+ folder = str(latest["folder"])
+ file = latest["file"].replace(".csv", "")
+
+ # check the card to see if its new
+ # 1. if its new send the folder/file_name to the card_history channel
+ # 2. if its new and its been 10 minutes since you last sent an entire card, then send up all of the data
+
+ if channels["card_history"]["last_value"] != (folder + "/" + file):
+ # we have a new card
+ # get the data for this event
+ data = json.loads(requests.get(self.device_address + "/json/" + folder + "/" + file).text)
+ dateTime = str(data["contents"]["utctime"])
+
+ # timestamp = time.mktime(time.strptime(dateTime, '%Y-%m-%d %H:%M:%S.%f'))
+
+ reg = "(\d{4})-(\d{2})-(\d{2}) (\d{2}):(\d{2}):(\d{2}).(\d*)"
+ fd = re.search(reg, dateTime)
+ dt = datetime(int(fd.group(1)), int(fd.group(2)), int(fd.group(3)), int(fd.group(4)), int(fd.group(5)), int(fd.group(6)), int(fd.group(7)))
+ timestamp = calendar.timegm(dt.timetuple())
+ card_timestamp = int(time.mktime(dt.timetuple()))
+
+ print "New card detected @ {0}".format(datetime.strftime(datetime.fromtimestamp(timestamp), "%Y-%m-%d %H:%M:%S.%f"))
+ # set the last value = to current value and upload your data
+ channels["card_history"]["last_value"] = (folder + "/" + file)
+ self.sendtodb("card_history", (folder + "/" + file), card_timestamp)
+
+ # check the last time the card was updated
+ if (time.time() - int(channels["card_history"]["last_time_uploaded"])) > self.cardLoopTimer or self.statusChanged or self.forceSend:
+ # its been 10 minutes, send the full upload
+ print "Either status has changed or last stored card is too old."
+ channels["card_history"]["last_time_uploaded"] = time.time()
+ self.process_card(data, timestamp, card_timestamp, sendCards=True)
+ return
+ else:
+ self.process_card(data, timestamp, card_timestamp, sendCards=False)
+
+ def process_card(self, data, data_timestamp, card_timestamp, sendCards=False):
+ # if sendCards = True then we upload all data no matter what, including cards
+
+ # check what type of data it is
+ # check if its changed, if it has, how long has it been since it changed
+ # NOTE: the initial vaue of "" is given to all channels in the channels object,
+ # so to avoid comparing a string to a float, and to make sure on startup we send all of the values, the first time through we send everything that has a "" as its last value
+
+ # We don't want to store any data on starting, just the cards
+ if self.status != 'Starting':
+ for channel in data["contents"]:
+ if channels.has_key(channel):
+ if channels[channel]["data_type"] == "str":
+ if (data["contents"][channel] != channels[channel]["last_value"] and ((time.time() - int(channels[channel]["last_time_uploaded"])) > int(channels[channel]["min_time_between_uploads"]))) or sendCards:
+ print "new value for: ", channel
+ print data["contents"][channel]
+ self.sendtodb(channel, str(data["contents"][channel]), int(data_timestamp))
+ channels[channel]["last_value"] = data["contents"][channel]
+ channels[channel]["last_time_uploaded"] = time.time()
+ if channels[channel]["data_type"] == "float" or channels[channel]["data_type"] == "int":
+ if channels[channel]["last_value"] == "":
+ # print "first time getting data"
+ print "new value for: ", channel
+ print data["contents"][channel]
+ self.sendtodb(channel, str(data["contents"][channel]), int(data_timestamp))
+ channels[channel]["last_value"] = data["contents"][channel]
+ channels[channel]["last_time_uploaded"] = time.time()
+ if (abs(float(data["contents"][channel]) - float(channels[channel]["last_value"])) > channels[channel]["change_amount"] and ((time.time() - int(channels[channel]["last_time_uploaded"])) > int(channels[channel]["min_time_between_uploads"]))) or sendCards:
+ # print "first time getting data"
+ print "new value for: ", channel
+ print data["contents"][channel]
+ self.sendtodb(channel, str(data["contents"][channel]), int(data_timestamp))
+ channels[channel]["last_value"] = data["contents"][channel]
+ channels[channel]["last_time_uploaded"] = time.time()
+
+ if sendCards:
+ sc = data["s"]
+ dc = data["d"]
+ for i in range(len(data["d"])):
+ try:
+ for x in range(len(data["d"][i])):
+ data["d"][i][x] = float('%.3f' % data["d"][i][x])
+ except Exception, e:
+ print e
+ for i in range(len(data["s"])):
+ try:
+ for x in range(len(data["s"][i])):
+ data["s"][i][x] = float('%.3f' % data["s"][i][x])
+ except Exception, e:
+ print e
+
+ sc = data["s"]
+ dc = data["d"]
+ newSc = "["
+ for i in sc:
+ try:
+ if i[0] is None:
+ continue
+ if i[0] != 0.0 and i[1] != 0.0:
+ newSc += "[" + str(i[0]) + "," + str(i[1]) + "],"
+ except:
+ pass
+ newSc += "[" + str(sc[0][0]) + "," + str(sc[0][1]) + "]"
+ newSc += "]"
+
+ newDc = "["
+ for i in dc:
+ try:
+ if i[0] is None:
+ continue
+ if i[0] != 0.0 and i[1] != 0.0:
+ newDc += "[" + str(i[0]) + "," + str(i[1]) + "],"
+ except:
+ pass
+ newDc += "[" + str(dc[0][0]) + "," + str(dc[0][1]) + "]"
+ newDc += "]"
+
+ self.sendtodb("sc", newSc, card_timestamp)
+ self.sendtodb("dc", newDc, card_timestamp)
+
+ def getLatestXCards(self, numCards):
+ data = json.loads(requests.get(self.device_address + "/json/latest/" + str(int(numCards))).text)
+ for card in data['cards']:
+ card_data = json.loads(requests.get(self.device_address + "/json/" + data['folder'] + "/" + card).text)
+ dateTime = str(card_data["contents"]["utctime"])
+ # timestamp = time.mktime(time.strptime(dateTime, '%Y-%m-%d %H:%M:%S.%f'))
+
+ reg = "(\d{4})-(\d{2})-(\d{2}) (\d{2}):(\d{2}):(\d{2}).(\d*)"
+ fd = re.search(reg, dateTime)
+ dt = datetime(int(fd.group(1)), int(fd.group(2)), int(fd.group(3)), int(fd.group(4)), int(fd.group(5)), int(fd.group(6)), int(fd.group(7)))
+ timestamp = calendar.timegm(dt.timetuple())
+ card_timestamp = int(time.mktime(dt.timetuple()))
+
+ channels["card_history"]["last_value"] = (data['folder'] + "/" + card)
+ self.sendtodb("card_history", (data['folder'] + "/" + card), card_timestamp)
+ self.process_card(card_data, timestamp, card_timestamp, sendCards=True)
+
+ def getDataLoggerStatus(self):
+ data = json.loads(requests.get(self.device_address + "/json/pythonstatus/").text)
+ al_status = "Not OK"
+ if data['status']['alarmLogger']:
+ al_status = "OK"
+
+ if al_status != self.al_status_last:
+ self.sendtodb("alarmlogger_status", al_status, 0)
+ self.al_status_last = al_status
+
+ dl_status = "Not OK"
+ if data['status']['dataLogger']:
+ dl_status = "OK"
+ if al_status != self.dl_status_last:
+ self.sendtodb("datalogger_status", dl_status, 0)
+ self.dl_status_last = dl_status
+
+ def poc_get_card(self, name, value):
+ self.getcard(value)
+
+ def poc_sync(self, name, value):
+ self.sendtodb("connected", "true", 0)
+ return True
+
+ def poc_set_address(self, name, value):
+ self.device_address = value
+ return True
+
+ def poc_refresh_data(self, name, value):
+ self.forceSend = True
+ return True
diff --git a/POCloud/w_mysql/poc.py b/POCloud/w_mysql/poc.py
new file mode 100644
index 0000000..8bea956
--- /dev/null
+++ b/POCloud/w_mysql/poc.py
@@ -0,0 +1,446 @@
+#!/usr/bin/python
+
+import types
+import traceback
+import binascii
+import threading
+import time
+import thread
+import os
+import struct
+import sys
+import serial
+import minimalmodbus
+import pickle
+import re
+from device_base import deviceBase
+from datetime import datetime
+
+import requests
+try:
+ import json
+except:
+ import simplejson as json
+import calendar
+
+
def min_max_check(val, min, max):
    """Clamp *val* to the inclusive range [min, max].

    NOTE: the parameter names shadow the builtins min()/max(); they are kept
    unchanged for interface compatibility with existing callers.
    """
    if val > max:
        return max
    if val < min:
        return min
    return val
+
# Gauge-off (daily history) channels: maps the device's history field names to
# the Meshify channel each value is published on by checkGaugeOffData().
# last_value caches the most recent upload so unchanged values are skipped;
# change_amount of 0 means "send on any change".
go_channels = {
    "percent_run": {"meshifyName": "go_percent_run", "last_value": "", "last_send_time": 0, "data_type": "float", "change_amount": 0},
    # BUG FIX: data_type for kWh was " float" (leading space), so it would
    # never match a "float" comparison.
    "kWh": {"meshifyName": "go_kwh", "last_value": "", "last_send_time": 0, "data_type": "float", "change_amount": 0},
    "electricity_cost": {"meshifyName": "go_electricity_cost", "last_value": "", "last_send_time": 0, "data_type": "float", "change_amount": 0},
    "peak_load": {"meshifyName": "go_peak_load", "last_value": "", "last_send_time": 0, "data_type": "float", "change_amount": 0},
    "min_load": {"meshifyName": "go_min_load", "last_value": "", "last_send_time": 0, "data_type": "float", "change_amount": 0},
    "average_SPM": {"meshifyName": "go_average_spm", "last_value": "", "last_send_time": 0, "data_type": "float", "change_amount": 0},
    "production_calculated": {"meshifyName": "go_production_calculated", "last_value": "", "last_send_time": 0, "data_type": "float", "change_amount": 0},
    "full_card_production": {"meshifyName": "go_full_card_production", "last_value": "", "last_send_time": 0, "data_type": "float", "change_amount": 0},
    "polished_rod_HP": {"meshifyName": "go_polished_rod_hp", "last_value": "", "last_send_time": 0, "data_type": "float", "change_amount": 0},
    "lifting_cost": {"meshifyName": "go_lifting_cost", "last_value": "", "last_send_time": 0, "data_type": "float", "change_amount": 0},
    "fluid_above_pump": {"meshifyName": "go_fluid_above_pump", "last_value": "", "last_send_time": 0, "data_type": "float", "change_amount": 0},
    "pump_intake_pressure": {"meshifyName": "go_pump_intake_pressure", "last_value": "", "last_send_time": 0, "data_type": "float", "change_amount": 0},
    "kWh_regen": {"meshifyName": "go_kwh_regen", "last_value": "", "last_send_time": 0, "data_type": "float", "change_amount": 0},
    "inflow_rate": {"meshifyName": "go_inflow_rate", "last_value": "", "last_send_time": 0, "data_type": "float", "change_amount": 0},
    }
+
stroke_data_min_upload_time = 300  # seconds

def _stroke_channel(data_type, change_amount=0, min_gap=stroke_data_min_upload_time):
    """Build one per-stroke channel record with the shared default fields."""
    return {
        "last_value": "",
        "data_type": data_type,
        "change_amount": change_amount,
        "last_time_uploaded": 0,
        "min_time_between_uploads": min_gap,
    }

# Per-stroke data channels keyed by the device-side field name.  "status" and
# "card_history" are never rate-limited; every other channel waits at least
# stroke_data_min_upload_time seconds between uploads unless a full card
# upload is forced.
channels = {
    "status": _stroke_channel("str", min_gap=0),
    "card_history": _stroke_channel("str", min_gap=0),
    "well_name": _stroke_channel("str"),
    "tubing_head_pressure": _stroke_channel("float", change_amount=5),
    "fluid_gradient": _stroke_channel("float"),
    "stuffing_box_friction": _stroke_channel("float"),
    "dt": _stroke_channel("float"),
    "downhole_gross_stroke": _stroke_channel("float"),
    "downhole_adjusted_gross_stroke": _stroke_channel("float"),
    "downhole_net_stroke": _stroke_channel("float"),
    "downhole_fluid_load": _stroke_channel("float"),
    "surface_max_load": _stroke_channel("float"),
    "surface_min_load": _stroke_channel("float"),
    "tubing_movement": _stroke_channel("float"),
    "surface_stroke_length": _stroke_channel("float"),
    "fillage_percent": _stroke_channel("float"),
    "polished_rod_hp": _stroke_channel("float"),
    "pump_hp": _stroke_channel("float"),
    "SPM": _stroke_channel("float"),
    "fluid_above_pump": _stroke_channel("float"),
    "pump_intake_pressure": _stroke_channel("float"),
    "stroke_production": _stroke_channel("float"),
    "drive_torque_mode": _stroke_channel("int"),
    "torque_reference": _stroke_channel("float"),
    "speed_reference": _stroke_channel("float"),
    "downhole_min_position": _stroke_channel("float"),
    "downhole_max_position": _stroke_channel("float"),
}
+
+
total_min_upload_time = 300  # seconds

def _daily_total(meshify_channel):
    """Build one daily-total channel record; only the platform name varies."""
    return {
        "meshify_channel": meshify_channel,
        "last_value": 0,
        "data_type": "float",
        "change_amount": 0,
        "last_time_uploaded": 0,
        "min_time_between_uploads": total_min_upload_time,
    }

# Current Daily Totals, keyed by the value names the device reports in
# /json/totals (see checkDailyTotals()).
dt_channels = {
    "Average_SPM": _daily_total("dt_average_spm"),
    "Downhole_Net_Stroke": _daily_total("dt_downhole_net_stroke"),
    "Electricity_Cost": _daily_total("dt_electricity_cost"),
    "Fluid_Level": _daily_total("dt_fluid_level"),
    "Full_Card_Production": _daily_total("dt_full_card_production"),
    "Inflow_Rate": _daily_total("dt_inflow_rate"),
    "kWh": _daily_total("dt_kWh"),
    "kWh_Regen": _daily_total("dt_kWh_regen"),
    "Lifting_Cost": _daily_total("dt_lifting_cost"),
    "Peak_Load": _daily_total("dt_peak_load"),
    "Min_Load": _daily_total("dt_min_load"),
    "Percent_Run": _daily_total("dt_percent_run"),
    "Polished_Rod_HP": _daily_total("dt_polished_rod_hp"),
    "Calculated_Production": _daily_total("dt_calculated_production"),
    "Projected_Production": _daily_total("dt_projected_production"),
    "Pump_HP": _daily_total("dt_pump_hp"),
    # NOTE: "Pump_Intake_Presure" reproduces the key as the device sends it;
    # "fixing" the spelling would break the lookup in checkDailyTotals().
    "Pump_Intake_Presure": _daily_total("dt_pump_intake_pressure"),
    "Surface_Stroke_Length": _daily_total("dt_surface_stroke_length"),
    "Tubing_Movement": _daily_total("dt_tubing_movement"),
}
+
+
+class start(threading.Thread, deviceBase):
+
+ def __init__(self, name=None, number=None, mac=None, Q=None, mcu=None, companyId=None, offset=None, mqtt=None, Nodes=None):
+ threading.Thread.__init__(self)
+ deviceBase.__init__(self, name=name, number=number, mac=mac, Q=Q, mcu=mcu, companyId=companyId, offset=offset, mqtt=mqtt, Nodes=Nodes)
+
+ self.daemon = True
+ self.forceSend = True
+ self.version = "2"
+ # self.device_address = "http://192.168.1.30/"
+ self.device_address = "http://localhost/"
+ self.cardLoopTimer = 600
+ self.finished = threading.Event()
+ threading.Thread.start(self)
+ self.statusChanged = False
+ self.al_status_last = False
+ self.dl_status_last = False
+
+ # load stored event ID's
+ try:
+ with open('eventIds.p', 'rb') as handle:
+ self.eventIds = pickle.load(handle)
+
+ print "found pickled eventID dictionary: {0}".format(self.eventIds)
+ except:
+ print "couldn't load enent ID's from pickle"
+ self.eventIds = []
+
+ # load stored wellconfig's
+ try:
+ with open('wellSetup.p', 'rb') as handle:
+ self.wellSetup = pickle.load(handle)
+
+ print "Found pickled Well Setup (but it's going to be too long to print)"
+ # print self.wellConfig
+ except:
+ print "couldn't load Well Setup from pickle"
+ self.wellSetup = []
+
+ self.sendtodbJSON("device_address", self.device_address, 0)
+
    # this is a required function for all drivers, its goal is to upload some piece of data
    # about your device so it can be seen on the web
    def register(self):
        """Reset the cached status so checkStatus() re-uploads on the next poll."""
        channels["status"]["last_value"] = ""
+
    def run(self):
        """Thread body: poll all device endpoints roughly every 3 seconds.

        Each phase records its name in the local runLoopStatus so the except
        branch can report which call failed before backing off for 20 s and
        retrying the whole loop.
        """
        self.runLoopStatus = ""  # NOTE(review): never read; the loop tracks a *local* runLoopStatus
        checkBackupEvery = 100   # run checkBackup() every Nth iteration (or on forceSend)
        checkBackupSkipped = 1
        while True:
            try:
                runLoopStatus = "checkEvents"
                self.checkEvents()

                runLoopStatus = "checkStatus"
                self.checkStatus()

                runLoopStatus = "checkDailyTotals"
                self.checkDailyTotals()

                runLoopStatus = "checkGaugeOffData"
                self.checkGaugeOffData()

                runLoopStatus = "getDataLoggerStatus()"
                self.getDataLoggerStatus()

                # After a status transition, backfill the 5 newest cards so
                # the platform has context around the change; otherwise just
                # watch for a single new card.
                if self.statusChanged:
                    runLoopStatus = "getLatestXCards"
                    self.getLatestXCards(5)
                else:
                    runLoopStatus = "checkLatestCard"
                    self.checkLatestCard()

                if self.forceSend or (checkBackupSkipped > checkBackupEvery):
                    runLoopStatus = "checkBackup"
                    self.checkBackup()
                    checkBackupSkipped = 0
                checkBackupSkipped = checkBackupSkipped + 1

                runLoopStatus = "Complete"
                time.sleep(3)
                self.forceSend = False
            except Exception, e:
                sleep_timer = 20
                print "Error during {0} of run loop: {1}\nWill try again in {2} seconds...".format(runLoopStatus, e, sleep_timer)
                time.sleep(sleep_timer)
+
+ def checkBackup(self):
+ backupList = json.loads(requests.get(self.device_address + "/json/backups").text)
+ file = backupList["backups"][0]
+ data = json.loads(requests.get(self.device_address + "/json/backups/" + file).text)
+ timestamp = time.time()
+ if data != self.wellSetup or self.forceSend:
+ self.sendtodbJSON("well_setup", json.dumps(data), timestamp)
+ self.wellSetup = data
+ with open('wellSetup.p', 'wb') as handle:
+ pickle.dump(self.wellSetup, handle)
+
    def checkEvents(self):
        """Fetch the device's event list and upload any event not seen before.

        Seen IDs are kept in self.eventIds (capped at the 50 most recent) and
        pickled to eventIds.p after every new event so restarts do not
        re-send old events.
        """
        data = json.loads(requests.get(self.device_address + "/json/event_list").text)
        events = data["events"]
        for event in events:
            if int(event["id"]) not in self.eventIds:
                # Event times are ISO-8601 with a trailing Z; timegm() keeps
                # the parsed struct_time in UTC.
                timestamp = calendar.timegm(time.strptime(event["datetime"], '%Y-%m-%dT%H:%M:%S.%fZ'))
                # we have a new event
                self.sendtodbJSON("events", json.dumps(event), timestamp)
                self.eventIds.append(int(event["id"]))
                if len(self.eventIds) > 50:
                    del self.eventIds[0]
                with open('eventIds.p', 'wb') as handle:
                    pickle.dump(self.eventIds, handle)
+
    def checkStatus(self):
        """Read the device run status, detect transitions, and upload the
        status string with its device-side timestamp.

        Sets self.statusChanged, which run() uses to decide between a card
        backfill and a single-card check.  Non-200 responses are ignored.
        """
        statusMap = {
            0: 'Stopped',
            1: 'Running',
            2: 'Pumped Off',
            3: 'Faulted',
            4: 'Starting',
            5: 'Recovering',
            100: 'Read Error',
            1000: 'PLC Error',
            9999: 'No Response'
        }
        st_response = requests.get(self.device_address + "/json/status")
        if st_response.status_code == 200:
            data = json.loads(st_response.text)
            date = data["ISOdate"]
            status = statusMap[int(data["status"])]

            if channels["status"]["last_value"] != status:
                self.statusChanged = True
                print "Status has changed from {0} to {1} @ {2}".format(channels["status"]["last_value"], status, time.time())
            else:
                self.statusChanged = False

            if self.statusChanged or self.forceSend:
                self.status = status
                # Hand-rolled parse of "YYYY-MM-DDTHH:MM:SS.mmmZ".
                # NOTE(review): group(7) is milliseconds but is passed to
                # datetime() as microseconds; harmless here because
                # timetuple() below drops sub-second precision anyway.
                reg = "(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2}).(\d{3})Z"
                fd = re.search(reg, date)
                dt = datetime(int(fd.group(1)), int(fd.group(2)), int(fd.group(3)), int(fd.group(4)), int(fd.group(5)), int(fd.group(6)), int(fd.group(7)))
                # timestamp = int(time.mktime(time.strptime(date, '%Y-%m-%dT%H:%M:%S.%fZ')))
                timestamp = calendar.timegm(dt.timetuple())
                self.sendtodb("status", status, timestamp)
                channels["status"]["last_value"] = status
+
+ def checkDailyTotals(self):
+ data = json.loads(requests.get(self.device_address + "/json/totals").text)
+ total = data["totals"]
+ if total['status'] == "success":
+ timestamp = 0
+ for val in total['values']:
+ if dt_channels.has_key(val['name']):
+ if ((time.time() - int(dt_channels[val['name']]['last_time_uploaded'])) > int(dt_channels[val['name']]['min_time_between_uploads'])):
+ if (float(val['value']) >= (float(dt_channels[val['name']]["last_value"]) + float(dt_channels[val['name']]["change_amount"]))) or (float(val['value']) <= (float(dt_channels[val['name']]["last_value"]) - float(dt_channels[val['name']]["change_amount"]))):
+ print("[dailyTotal] {0}: {1}".format(val['name'], val['value']))
+ self.sendtodb(dt_channels[val['name']]["meshify_channel"], float(val['value']), timestamp)
+ dt_channels[val['name']]["last_value"] = float(val['value'])
+ dt_channels[val['name']]["last_time_uploaded"] = time.time()
+ else:
+ print("checkDailyTotalsError: {0}".format(total.message))
+
    def checkGaugeOffData(self):
        """Fetch the gauge-off history record from /json/history and upload
        every field that maps to a go_channels entry and differs from its
        cached value, stamped with the record's gauge_date."""
        data = json.loads(requests.get(self.device_address + "/json/history").text)
        day = data["hist"]
        date = day['gauge_date']
        # print day["gauge_date"]
        # timestamp = time.mktime(time.strptime(day["gauge_date"], '%Y-%m-%dT%H:%M:%S.%fZ'))

        # Hand-rolled parse of "YYYY-MM-DDTHH:MM:SS.mmmZ"; the millisecond
        # group is dropped by timetuple() below.
        reg = "(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2}).(\d{3})Z"
        fd = re.search(reg, date)
        dt = datetime(int(fd.group(1)), int(fd.group(2)), int(fd.group(3)), int(fd.group(4)), int(fd.group(5)), int(fd.group(6)), int(fd.group(7)))
        # timestamp = int(time.mktime(time.strptime(date, '%Y-%m-%dT%H:%M:%S.%fZ')))
        timestamp = calendar.timegm(dt.timetuple())
        for entry in day:
            if go_channels.has_key(entry):
                # "percent_run": {"meshifyName": "go_percent_run", "last_value": "", "last_send_time": 0, "data_type": "float", "change_amount": 0},
                if go_channels[entry]["last_value"] != day[entry]:
                    print entry, day[entry]
                    print go_channels[entry]["meshifyName"], day[entry], timestamp
                    self.sendtodb(go_channels[entry]["meshifyName"], day[entry], timestamp)
                    go_channels[entry]["last_value"] = day[entry]
+
    def checkLatestCard(self):
        """Detect a new dynamometer card and upload it.

        A card is "new" when its folder/file id differs from the cached
        card_history value.  New cards always get a history entry; the full
        card payload is only re-sent when the last full upload is older than
        self.cardLoopTimer, the status just changed, or a refresh is forced.
        """
        latest = requests.get(self.device_address + "/json/latest")
        latest = json.loads(latest.text)
        folder = str(latest["folder"])
        file = latest["file"].replace(".csv", "")

        # check the card to see if its new
        # 1. if its new send the folder/file_name to the card_history channel
        # 2. if its new and its been 10 minutes since you last sent an entire card, then send up all of the data

        if channels["card_history"]["last_value"] != (folder + "/" + file):
            # we have a new card
            # get the data for this event
            data = json.loads(requests.get(self.device_address + "/json/" + folder + "/" + file).text)
            dateTime = str(data["card_data"]["Stroke_Time"])
            # timestamp = time.mktime(time.strptime(dateTime, '%Y-%m-%dT%H:%M:%S.%fZ'))

            # Hand-rolled parse of "YYYY-MM-DDTHH:MM:SS.mmmZ"; milliseconds
            # are dropped by timetuple() below.
            reg = "(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2}).(\d{3})Z"
            fd = re.search(reg, dateTime)
            dt = datetime(int(fd.group(1)), int(fd.group(2)), int(fd.group(3)), int(fd.group(4)), int(fd.group(5)), int(fd.group(6)), int(fd.group(7)))
            # UTC epoch for the data channels; local-time epoch for the cards.
            timestamp = calendar.timegm(dt.timetuple())
            card_timestamp = int(time.mktime(dt.timetuple()))

            print "New card detected @ {0}".format(datetime.strftime(datetime.fromtimestamp(timestamp), "%Y-%m-%d %H:%M:%S.%f"))
            # set the last value = to current value and upload your data
            channels["card_history"]["last_value"] = (folder + "/" + file)
            self.sendtodb("card_history", (folder + "/" + file), timestamp)

            # check the last time the card was updated
            if (time.time() - int(channels["card_history"]["last_time_uploaded"])) > self.cardLoopTimer or self.statusChanged or self.forceSend:
                # its been 10 minutes, send the full upload
                print "Either status has changed or last stored card is too old."
                channels["card_history"]["last_time_uploaded"] = time.time()
                self.process_card(data, timestamp, card_timestamp, sendCards=True)
                return
            else:
                self.process_card(data, timestamp, card_timestamp, sendCards=False)
+
    def process_card(self, data, data_timestamp, card_timestamp, sendCards=False):
        """Upload the scalar values in data["card_data"] and, when sendCards
        is True, the surface ("sc") and downhole ("dc") card point arrays.

        data_timestamp (UTC epoch) stamps the scalar channels;
        card_timestamp (local-time epoch) stamps the card blobs.  Per-channel
        change thresholds and rate limits from `channels` are bypassed when
        sendCards is True.
        """

        # if sendCards = True then we upload all data no matter what, including cards

        # check what type of data it is
        # check if its changed, if it has, how long has it been since it changed
        # NOTE: the initial value of "" is given to all channels in the channels object,
        # so to avoid comparing a string to a float, and to make sure on startup we send all of the values, the first time through we send everything that has a "" as its last value

        # We don't want to store any data on starting, just the cards
        if self.status != 'Starting':
            for channel in data["card_data"]:
                if channels.has_key(channel):
                    if channels[channel]["data_type"] == "str":
                        if (data["card_data"][channel] != channels[channel]["last_value"] and ((time.time() - int(channels[channel]["last_time_uploaded"])) > int(channels[channel]["min_time_between_uploads"]))) or sendCards:
                            print "new value for: ", channel
                            print data["card_data"][channel]
                            self.sendtodb(channel, str(data["card_data"][channel]), int(data_timestamp))
                            channels[channel]["last_value"] = data["card_data"][channel]
                            channels[channel]["last_time_uploaded"] = time.time()
                    if channels[channel]["data_type"] == "float" or channels[channel]["data_type"] == "int":
                        # First-time branch: last_value is still the "" sentinel.
                        if channels[channel]["last_value"] == "":
                            # print "first time getting data"
                            print "new value for: ", channel
                            print data["card_data"][channel]
                            self.sendtodb(channel, str(data["card_data"][channel]), int(data_timestamp))
                            channels[channel]["last_value"] = data["card_data"][channel]
                            channels[channel]["last_time_uploaded"] = time.time()
                        # NOTE(review): when sendCards is True this condition is
                        # also true right after the first-time branch above, so
                        # the very first value can be sent twice in one call.
                        if (abs(float(data["card_data"][channel]) - float(channels[channel]["last_value"])) > channels[channel]["change_amount"] and ((time.time() - int(channels[channel]["last_time_uploaded"])) > int(channels[channel]["min_time_between_uploads"]))) or sendCards:
                            # print "first time getting data"
                            print "new value for: ", channel
                            print data["card_data"][channel]
                            self.sendtodb(channel, str(data["card_data"][channel]), int(data_timestamp))
                            channels[channel]["last_value"] = data["card_data"][channel]
                            channels[channel]["last_time_uploaded"] = time.time()

        if sendCards:
            # Build "[[pos, load], ...]" strings for the surface and downhole
            # cards.  Points where either coordinate is 0.0 (or missing) are
            # skipped; the first point is appended again at the end, which
            # closes the card loop for plotting.
            s_p = data["card_data"]["Surface_Position"]
            s_l = data["card_data"]["Surface_Load"]
            d_p = data["card_data"]["Downhole_Position"]
            d_l = data["card_data"]["Downhole_Load"]
            newSc = "["
            newDc = "["

            for i in range(len(s_p)):
                try:
                    if s_p[i] is None:
                        continue
                    if s_p[i] != 0.0 and s_l[i] != 0.0:
                        newSc += "[" + str(s_p[i]) + ", " + str(s_l[i]) + "], "
                except:
                    pass
            newSc += "[" + str(s_p[0]) + ", " + str(s_l[0]) + "]"
            newSc += "]"

            for i in range(len(d_p)):
                try:
                    if d_p[i] is None:
                        continue
                    if d_p[i] != 0.0 and d_l[i] != 0.0:
                        newDc += "[" + str(d_p[i]) + ", " + str(d_l[i]) + "], "
                except:
                    pass
            newDc += "[" + str(d_p[0]) + ", " + str(d_l[0]) + "]"
            newDc += "]"

            self.sendtodb("sc", newSc, card_timestamp)
            self.sendtodb("dc", newDc, card_timestamp)
+
+ def getLatestXCards(self, numCards):
+ data = json.loads(requests.get(self.device_address + "/json/latest/" + str(int(numCards))).text)
+ for card in data['cards']:
+ card_data = json.loads(requests.get(self.device_address + "/json/" + data['folder'] + "/" + card).text)
+ dateTime = str(card_data["card_data"]["Stroke_Time"])
+ # timestamp = time.mktime(time.strptime(dateTime,'%Y-%m-%dT%H:%M:%S.%fZ'))
+ reg = "(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2}).(\d{3})Z"
+ fd = re.search(reg, dateTime)
+ dt = datetime(int(fd.group(1)), int(fd.group(2)), int(fd.group(3)), int(fd.group(4)), int(fd.group(5)), int(fd.group(6)), int(fd.group(7)))
+ timestamp = calendar.timegm(dt.timetuple())
+ card_timestamp = int(time.mktime(dt.timetuple()))
+
+ channels["card_history"]["last_value"] = (data['folder'] + "/" + card)
+ self.sendtodb("card_history", (data['folder'] + "/" + card), card_timestamp)
+ self.process_card(card_data, timestamp, sendCards=True)
+
+ def getDataLoggerStatus(self):
+ data = json.loads(requests.get(self.device_address + "/json/pythonstatus/").text)
+ al_status = "Not OK"
+ if data['status']['alarmLogger']:
+ al_status = "OK"
+
+ if al_status != self.al_status_last:
+ self.sendtodb("alarmlogger_status", al_status, 0)
+ self.al_status_last = al_status
+
+ dl_status = "Not OK"
+ if data['status']['dataLogger']:
+ dl_status = "OK"
+ if al_status != self.dl_status_last:
+ self.sendtodb("datalogger_status", dl_status, 0)
+ self.dl_status_last = dl_status
+
    def poc_get_card(self, name, value):
        """Platform command handler: fetch/upload the card identified by
        *value*.  Delegates to self.getcard(), defined outside this chunk."""
        self.getcard(value)
+
    def poc_sync(self, name, value):
        """Platform command handler: acknowledge connectivity by publishing
        "true" on the "connected" channel.  Returns True on success."""
        self.sendtodb("connected", "true", 0)
        return True
+
    def poc_set_address(self, name, value):
        """Platform command handler: point this driver at a new device base
        URL (e.g. "http://192.168.1.30/").  Takes effect on the next poll."""
        self.device_address = value
        return True
+
    def poc_refresh_data(self, name, value):
        """Platform command handler: force a full re-upload on the next run()
        iteration, bypassing change detection and rate limits."""
        self.forceSend = True
        return True
diff --git a/POCloud/w_sqlite/poc.py b/POCloud/w_sqlite/poc.py
new file mode 100644
index 0000000..fb94af0
--- /dev/null
+++ b/POCloud/w_sqlite/poc.py
@@ -0,0 +1,449 @@
+#!/usr/bin/python
+
+import types
+import traceback
+import binascii
+import threading
+import time
+import thread
+import os
+import struct
+import sys
+import serial
+import minimalmodbus
+import pickle
+import re
+from device_base import deviceBase
+from datetime import datetime
+
+import requests
+try:
+ import json
+except:
+ import simplejson as json
+import calendar
+
+
def min_max_check(val, min, max):
    """Clamp *val* to the inclusive range [min, max].

    NOTE(review): the parameter names shadow the builtins min()/max(); they
    are kept because callers may pass them by keyword.
    """
    if val < min:
        return min
    return max if val > max else val
+
# Gauge-off history channels: maps a key from the device's /json/history
# payload to the Meshify channel it is uploaded as, plus per-channel change
# detection state (last_value / last_send_time).
go_channels = {
    "percent_run": {"meshifyName": "go_percent_run", "last_value": "", "last_send_time": 0, "data_type": "float", "change_amount": 0},
    # BUG FIX: data_type was " float" (leading space), so any code comparing
    # it to "float" would silently never match.
    "kWh": {"meshifyName": "go_kwh", "last_value": "", "last_send_time": 0, "data_type": "float", "change_amount": 0},
    "electricity_cost": {"meshifyName": "go_electricity_cost", "last_value": "", "last_send_time": 0, "data_type": "float", "change_amount": 0},
    "peak_load": {"meshifyName": "go_peak_load", "last_value": "", "last_send_time": 0, "data_type": "float", "change_amount": 0},
    "min_load": {"meshifyName": "go_min_load", "last_value": "", "last_send_time": 0, "data_type": "float", "change_amount": 0},
    "average_SPM": {"meshifyName": "go_average_spm", "last_value": "", "last_send_time": 0, "data_type": "float", "change_amount": 0},
    "production_calculated": {"meshifyName": "go_production_calculated", "last_value": "", "last_send_time": 0, "data_type": "float", "change_amount": 0},
    "full_card_production": {"meshifyName": "go_full_card_production", "last_value": "", "last_send_time": 0, "data_type": "float", "change_amount": 0},
    "polished_rod_HP": {"meshifyName": "go_polished_rod_hp", "last_value": "", "last_send_time": 0, "data_type": "float", "change_amount": 0},
    "lifting_cost": {"meshifyName": "go_lifting_cost", "last_value": "", "last_send_time": 0, "data_type": "float", "change_amount": 0},
    "fluid_above_pump": {"meshifyName": "go_fluid_above_pump", "last_value": "", "last_send_time": 0, "data_type": "float", "change_amount": 0},
    "pump_intake_pressure": {"meshifyName": "go_pump_intake_pressure", "last_value": "", "last_send_time": 0, "data_type": "float", "change_amount": 0},
    "kWh_regen": {"meshifyName": "go_kwh_regen", "last_value": "", "last_send_time": 0, "data_type": "float", "change_amount": 0},
    "inflow_rate": {"meshifyName": "go_inflow_rate", "last_value": "", "last_send_time": 0, "data_type": "float", "change_amount": 0},
    }
+
stroke_data_min_upload_time = 300  # seconds between uploads of per-stroke data


def _stroke_channel(data_type, change_amount=0, min_time=stroke_data_min_upload_time):
    # Build one per-channel state record: last uploaded value plus the
    # change-threshold / rate-limit bookkeeping used by process_card().
    return {
        "last_value": "",
        "data_type": data_type,
        "change_amount": change_amount,
        "last_time_uploaded": 0,
        "min_time_between_uploads": min_time,
    }


# Per-stroke card channels keyed by the field names in the device's card JSON.
# status / card_history are never rate-limited (min_time 0).
channels = {
    "status": _stroke_channel("str", min_time=0),
    "card_history": _stroke_channel("str", min_time=0),
    "well_name": _stroke_channel("str"),
    "tubing_head_pressure": _stroke_channel("float", change_amount=5),
    "fluid_gradient": _stroke_channel("float"),
    "stuffing_box_friction": _stroke_channel("float"),
    "dt": _stroke_channel("float"),
    "downhole_gross_stroke": _stroke_channel("float"),
    "downhole_adjusted_gross_stroke": _stroke_channel("float"),
    "downhole_net_stroke": _stroke_channel("float"),
    "downhole_fluid_load": _stroke_channel("float"),
    "surface_max_load": _stroke_channel("float"),
    "surface_min_load": _stroke_channel("float"),
    "tubing_movement": _stroke_channel("float"),
    "surface_stroke_length": _stroke_channel("float"),
    "fillage_percent": _stroke_channel("float"),
    "polished_rod_hp": _stroke_channel("float"),
    "pump_hp": _stroke_channel("float"),
    "SPM": _stroke_channel("float"),
    "fluid_above_pump": _stroke_channel("float"),
    "pump_intake_pressure": _stroke_channel("float"),
    "stroke_production": _stroke_channel("float"),
    "drive_torque_mode": _stroke_channel("int"),
    "torque_reference": _stroke_channel("float"),
    "speed_reference": _stroke_channel("float"),
    "downhole_min_position": _stroke_channel("float"),
    "downhole_max_position": _stroke_channel("float"),
}
+
+
total_min_upload_time = 300  # seconds between uploads of daily-total values


def _daily_total_channel(meshify_channel):
    # Build one daily-total state record (all totals are floats with no
    # change threshold, rate-limited to total_min_upload_time).
    return {
        "meshify_channel": meshify_channel,
        "last_value": 0,
        "data_type": "float",
        "change_amount": 0,
        "last_time_uploaded": 0,
        "min_time_between_uploads": total_min_upload_time,
    }


# Current Daily Totals: keys must match the "name" values the device reports
# in /json/totals; values name the Meshify channel each total is uploaded as.
dt_channels = {
    "Average_SPM": _daily_total_channel("dt_average_spm"),
    "Downhole_Net_Stroke": _daily_total_channel("dt_downhole_net_stroke"),
    "Electricity_Cost": _daily_total_channel("dt_electricity_cost"),
    "Fluid_Level": _daily_total_channel("dt_fluid_level"),
    "Full_Card_Production": _daily_total_channel("dt_full_card_production"),
    "Inflow_Rate": _daily_total_channel("dt_inflow_rate"),
    "kWh": _daily_total_channel("dt_kWh"),
    "kWh_Regen": _daily_total_channel("dt_kWh_regen"),
    "Lifting_Cost": _daily_total_channel("dt_lifting_cost"),
    "Peak_Load": _daily_total_channel("dt_peak_load"),
    "Min_Load": _daily_total_channel("dt_min_load"),
    "Percent_Run": _daily_total_channel("dt_percent_run"),
    "Polished_Rod_HP": _daily_total_channel("dt_polished_rod_hp"),
    "Calculated_Production": _daily_total_channel("dt_calculated_production"),
    "Projected_Production": _daily_total_channel("dt_projected_production"),
    "Pump_HP": _daily_total_channel("dt_pump_hp"),
    # NOTE(review): "Presure" is misspelled, but this key must match the name
    # the device sends — confirm against the device JSON before "fixing" it.
    "Pump_Intake_Presure": _daily_total_channel("dt_pump_intake_pressure"),
    "Surface_Stroke_Length": _daily_total_channel("dt_surface_stroke_length"),
    "Tubing_Movement": _daily_total_channel("dt_tubing_movement"),
}
+
+
+class start(threading.Thread, deviceBase):
+
+ def __init__(self, name=None, number=None, mac=None, Q=None, mcu=None, companyId=None, offset=None, mqtt=None, Nodes=None):
+ threading.Thread.__init__(self)
+ deviceBase.__init__(self, name=name, number=number, mac=mac, Q=Q, mcu=mcu, companyId=companyId, offset=offset, mqtt=mqtt, Nodes=Nodes)
+
+ self.daemon = True
+ self.forceSend = True
+ self.version = "2"
+ # self.device_address = "http://192.168.1.30/"
+ self.device_address = "http://localhost/"
+ self.cardLoopTimer = 600
+ self.finished = threading.Event()
+ threading.Thread.start(self)
+ self.statusChanged = False
+ self.al_status_last = False
+ self.dl_status_last = False
+
+ # load stored event ID's
+ try:
+ with open('eventIds.p', 'rb') as handle:
+ self.eventIds = pickle.load(handle)
+
+ print "found pickled eventID dictionary: {0}".format(self.eventIds)
+ except:
+ print "couldn't load enent ID's from pickle"
+ self.eventIds = []
+
+ # load stored wellconfig's
+ try:
+ with open('wellSetup.p', 'rb') as handle:
+ self.wellSetup = pickle.load(handle)
+
+ print "Found pickled Well Setup (but it's going to be too long to print)"
+ # print self.wellConfig
+ except:
+ print "couldn't load Well Setup from pickle"
+ self.wellSetup = []
+
+ self.sendtodbJSON("device_address", self.device_address, 0)
+
    # this is a required function for all drivers, its goal is to upload some piece of data
    # about your device so it can be seen on the web
    def register(self):
        # Clear the cached status so the next checkStatus() poll treats the
        # device's current status as "changed" and re-uploads it.
        channels["status"]["last_value"] = ""
+
+ def run(self):
+ self.runLoopStatus = ""
+ checkBackupEvery = 100
+ checkBackupSkipped = 1
+ while True:
+ try:
+ runLoopStatus = "checkEvents"
+ self.checkEvents()
+
+ runLoopStatus = "checkStatus"
+ self.checkStatus()
+
+ runLoopStatus = "checkDailyTotals"
+ self.checkDailyTotals()
+
+ runLoopStatus = "checkGaugeOffData"
+ self.checkGaugeOffData()
+
+ runLoopStatus = "getDataLoggerStatus()"
+ self.getDataLoggerStatus()
+
+ if self.statusChanged:
+ runLoopStatus = "getLatestXCards"
+ self.getLatestXCards(5)
+ else:
+ runLoopStatus = "checkLatestCard"
+ self.checkLatestCard()
+
+ if self.forceSend or (checkBackupSkipped > checkBackupEvery):
+ runLoopStatus = "checkBackup"
+ self.checkBackup()
+ checkBackupSkipped = 0
+ checkBackupSkipped = checkBackupSkipped + 1
+
+ runLoopStatus = "Complete"
+ time.sleep(3)
+ self.forceSend = False
+ except Exception, e:
+ sleep_timer = 20
+ print "Error during {0} of run loop: {1}\nWill try again in {2} seconds...".format(runLoopStatus, e, sleep_timer)
+ time.sleep(sleep_timer)
+
+ def checkBackup(self):
+ backupList = json.loads(requests.get(self.device_address + "/json/backups").text)
+ file = backupList["backups"][0]
+ data = json.loads(requests.get(self.device_address + "/json/backups/" + file).text)
+ timestamp = time.time()
+ if data != self.wellSetup or self.forceSend:
+ self.sendtodbJSON("well_setup", json.dumps(data), timestamp)
+ self.wellSetup = data
+ with open('wellSetup.p', 'wb') as handle:
+ pickle.dump(self.wellSetup, handle)
+
+ def checkEvents(self):
+ data = json.loads(requests.get(self.device_address + "/json/event_list").text)
+ events = data["events"]
+ for event in events:
+ if int(event["id"]) not in self.eventIds:
+ timestamp = calendar.timegm(time.strptime(event["datetime"], '%Y-%m-%dT%H:%M:%S.%fZ'))
+ # we have a new event
+ self.sendtodbJSON("events", json.dumps(event), timestamp)
+ self.eventIds.append(int(event["id"]))
+ if len(self.eventIds) > 50:
+ del self.eventIds[0]
+ with open('eventIds.p', 'wb') as handle:
+ pickle.dump(self.eventIds, handle)
+
    def checkStatus(self):
        """Poll /json/status, map the numeric code to a label, and upload it
        when it changes (or when a refresh is forced).

        Side effect: sets self.statusChanged, which run() uses to choose
        between getLatestXCards() and checkLatestCard(), and self.status,
        which process_card() reads.
        """
        # Device status codes -> human-readable labels.
        statusMap = {
            0: 'Stopped',
            1: 'Running',
            2: 'Pumped Off',
            3: 'Faulted',
            4: 'Starting',
            5: 'Recovering',
            100: 'Read Error',
            1000: 'PLC Error',
            9999: 'No Response'
        }
        st_response = requests.get(self.device_address + "/json/status")
        if st_response.status_code == 200:
            data = json.loads(st_response.text)
            date = data["ISOdate"]
            status = statusMap[int(data["status"])]

            if channels["status"]["last_value"] != status:
                self.statusChanged = True
                print "Status has changed from {0} to {1} @ {2}".format(channels["status"]["last_value"], status, time.time())
            else:
                self.statusChanged = False

            if self.statusChanged or self.forceSend:
                self.status = status
                # Parse "YYYY-MM-DDTHH:MM:SS.mmmZ" by hand.  NOTE(review):
                # group(7) is milliseconds but datetime() takes microseconds;
                # harmless because timetuple() drops sub-seconds anyway.
                reg = "(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2}).(\d{3})Z"
                fd = re.search(reg, date)
                dt = datetime(int(fd.group(1)), int(fd.group(2)), int(fd.group(3)), int(fd.group(4)), int(fd.group(5)), int(fd.group(6)), int(fd.group(7)))
                # timestamp = int(time.mktime(time.strptime(date, '%Y-%m-%dT%H:%M:%S.%fZ')))
                # timegm treats the parsed time as UTC.
                timestamp = calendar.timegm(dt.timetuple())
                self.sendtodb("status", status, timestamp)
                channels["status"]["last_value"] = status
+
+ def checkDailyTotals(self):
+ data = json.loads(requests.get(self.device_address + "/json/totals").text)
+ total = data["totals"]
+ if total['status'] == "success":
+ timestamp = 0
+ for val in total['values']:
+ # if dt_channels.has_key(val['name']):
+ if val['name'] in dt_channels:
+ if ((time.time() - int(dt_channels[val['name']]['last_time_uploaded'])) > int(dt_channels[val['name']]['min_time_between_uploads'])):
+ if (float(val['value']) >= (float(dt_channels[val['name']]["last_value"]) + float(dt_channels[val['name']]["change_amount"]))) or (float(val['value']) <= (float(dt_channels[val['name']]["last_value"]) - float(dt_channels[val['name']]["change_amount"]))):
+ print("[dailyTotal] {0}: {1}".format(val['name'], val['value']))
+ self.sendtodb(dt_channels[val['name']]["meshify_channel"], float(val['value']), timestamp)
+ dt_channels[val['name']]["last_value"] = float(val['value'])
+ dt_channels[val['name']]["last_time_uploaded"] = time.time()
+ else:
+ print("checkDailyTotalsError: {0}".format(total.message))
+
+ def checkGaugeOffData(self):
+ data = json.loads(requests.get(self.device_address + "/json/history").text)
+ day = data["hist"]
+ date = day['gauge_date']
+ # print day["gauge_date"]
+ # timestamp = time.mktime(time.strptime(day["gauge_date"], '%Y-%m-%dT%H:%M:%S.%fZ'))
+
+ reg = "(\d{4})-(\d{2})-(\d{2}) (\d{2}):(\d{2}):(\d{2})"
+ fd = re.search(reg, date)
+ dt = datetime(int(fd.group(1)), int(fd.group(2)), int(fd.group(3)), int(fd.group(4)), int(fd.group(5)), int(fd.group(6)))
+ # timestamp = int(time.mktime(time.strptime(date, '%Y-%m-%dT%H:%M:%S.%fZ')))
+ timestamp = calendar.timegm(dt.timetuple())
+ for entry in day:
+ # if go_channels.has_key(entry):
+ if entry in go_channels:
+ # "percent_run": {"meshifyName": "go_percent_run", "last_value": "", "last_send_time": 0, "data_type": "float", "change_amount": 0},
+ if go_channels[entry]["last_value"] != day[entry]:
+ print entry, day[entry]
+ print go_channels[entry]["meshifyName"], day[entry], timestamp
+ self.sendtodb(go_channels[entry]["meshifyName"], day[entry], timestamp)
+ go_channels[entry]["last_value"] = day[entry]
+
    def checkLatestCard(self):
        """Detect a new dyno card and upload it (fully or just its history entry).

        The device's latest folder/file pair is compared with the cached
        card_history value; a full upload (all channels + card arrays) only
        happens every cardLoopTimer seconds, on a status change, or on a
        forced refresh.
        """
        latest = requests.get(self.device_address + "/json/latest")
        latest = json.loads(latest.text)
        folder = str(latest["folder"])
        file = latest["file"].replace(".csv", "")

        # check the card to see if its new
        # 1. if its new send the folder/file_name to the card_history channel
        # 2. if its new and its been 10 minutes since you last sent an entire card, then send up all of the data

        if channels["card_history"]["last_value"] != (folder + "/" + file):
            # we have a new card
            # get the data for this event
            data = json.loads(requests.get(self.device_address + "/json/" + folder + "/" + file).text)
            dateTime = str(data["card_data"]["Stroke_Time"])
            # timestamp = time.mktime(time.strptime(dateTime, '%Y-%m-%dT%H:%M:%S.%fZ'))

            # Stroke_Time arrives as "YYYY-MM-DD HH:MM:SS" here (no T/Z).
            reg = "(\d{4})-(\d{2})-(\d{2}) (\d{2}):(\d{2}):(\d{2})"
            fd = re.search(reg, dateTime)
            dt = datetime(int(fd.group(1)), int(fd.group(2)), int(fd.group(3)), int(fd.group(4)), int(fd.group(5)), int(fd.group(6)))
            # timegm => UTC epoch for channel data; mktime => local epoch for cards.
            timestamp = calendar.timegm(dt.timetuple())
            card_timestamp = int(time.mktime(dt.timetuple()))

            print "New card detected @ {0}".format(datetime.strftime(datetime.fromtimestamp(timestamp), "%Y-%m-%d %H:%M:%S.%f"))
            # set the last value = to current value and upload your data
            channels["card_history"]["last_value"] = (folder + "/" + file)
            self.sendtodb("card_history", (folder + "/" + file), timestamp)

            # check the last time the card was updated
            if (time.time() - int(channels["card_history"]["last_time_uploaded"])) > self.cardLoopTimer or self.statusChanged or self.forceSend:
                # its been 10 minutes, send the full upload
                print "Either status has changed or last stored card is too old."
                channels["card_history"]["last_time_uploaded"] = time.time()
                self.process_card(data, timestamp, card_timestamp, sendCards=True)
                return
            else:
                # Rate-limited: upload only the channels that changed, no card arrays.
                self.process_card(data, timestamp, card_timestamp, sendCards=False)
+
    def process_card(self, data, data_timestamp, card_timestamp, sendCards=False):
        """Upload the channels of one card, and optionally the card arrays.

        data            -- parsed card JSON (reads data["card_data"]).
        data_timestamp  -- UTC epoch used for per-channel uploads.
        card_timestamp  -- local epoch used for the "sc"/"dc" card arrays.
        sendCards=True  -- upload every channel regardless of change/rate
                           limits, plus the surface ("sc") and downhole
                           ("dc") position/load arrays.
        """
        # if sendCards = True then we upload all data no matter what, including cards

        # check what type of data it is
        # check if its changed, if it has, how long has it been since it changed
        # NOTE: the initial vaue of "" is given to all channels in the channels object,
        # so to avoid comparing a string to a float, and to make sure on startup we send all of the values, the first time through we send everything that has a "" as its last value

        # We don't want to store any data on starting, just the cards
        # NOTE(review): self.status is only set by checkStatus(); relies on
        # checkStatus() running first (guaranteed by run()'s call order).
        if self.status != 'Starting':
            for channel in data["card_data"]:
                # if channels.has_key(channel):
                if channel in channels:
                    if channels[channel]["data_type"] == "str":
                        if (data["card_data"][channel] != channels[channel]["last_value"] and ((time.time() - int(channels[channel]["last_time_uploaded"])) > int(channels[channel]["min_time_between_uploads"]))) or sendCards:
                            print "new value for: ", channel
                            print data["card_data"][channel]
                            self.sendtodb(channel, str(data["card_data"][channel]), int(data_timestamp))
                            channels[channel]["last_value"] = data["card_data"][channel]
                            channels[channel]["last_time_uploaded"] = time.time()
                    if channels[channel]["data_type"] == "float" or channels[channel]["data_type"] == "int":
                        # First value ever seen for this channel: always send it.
                        if channels[channel]["last_value"] == "":
                            # print "first time getting data"
                            print "new value for: ", channel
                            print data["card_data"][channel]
                            self.sendtodb(channel, str(data["card_data"][channel]), int(data_timestamp))
                            channels[channel]["last_value"] = data["card_data"][channel]
                            channels[channel]["last_time_uploaded"] = time.time()
                        # NOTE(review): this is an `if`, not `elif`, so with
                        # sendCards=True a first-time value is sent twice.
                        if (abs(float(data["card_data"][channel]) - float(channels[channel]["last_value"])) > channels[channel]["change_amount"] and ((time.time() - int(channels[channel]["last_time_uploaded"])) > int(channels[channel]["min_time_between_uploads"]))) or sendCards:
                            # print "first time getting data"
                            print "new value for: ", channel
                            print data["card_data"][channel]
                            self.sendtodb(channel, str(data["card_data"][channel]), int(data_timestamp))
                            channels[channel]["last_value"] = data["card_data"][channel]
                            channels[channel]["last_time_uploaded"] = time.time()

        if sendCards:
            # Build the surface and downhole cards as "[[pos, load], ...]"
            # strings, dropping null/zero points and closing the loop by
            # appending the first point at the end.
            s_p = data["card_data"]["Surface_Position"]
            s_l = data["card_data"]["Surface_Load"]
            d_p = data["card_data"]["Downhole_Position"]
            d_l = data["card_data"]["Downhole_Load"]
            newSc = "["
            newDc = "["

            for i in range(len(s_p)):
                try:
                    if s_p[i] is None:
                        continue
                    if s_p[i] != 0.0 and s_l[i] != 0.0:
                        newSc += "[" + str(s_p[i]) + ", " + str(s_l[i]) + "], "
                except:
                    # Best effort: skip malformed points rather than lose the card.
                    pass
            newSc += "[" + str(s_p[0]) + ", " + str(s_l[0]) + "]"
            newSc += "]"

            for i in range(len(d_p)):
                try:
                    if d_p[i] is None:
                        continue
                    if d_p[i] != 0.0 and d_l[i] != 0.0:
                        newDc += "[" + str(d_p[i]) + ", " + str(d_l[i]) + "], "
                except:
                    pass
            newDc += "[" + str(d_p[0]) + ", " + str(d_l[0]) + "]"
            newDc += "]"

            self.sendtodb("sc", newSc, card_timestamp)
            self.sendtodb("dc", newDc, card_timestamp)
+
+ def getLatestXCards(self, numCards):
+ data = json.loads(requests.get(self.device_address + "/json/latest/" + str(int(numCards))).text)
+ for card in data['cards']:
+ card_data = json.loads(requests.get(self.device_address + "/json/" + data['folder'] + "/" + card).text)
+ dateTime = str(card_data["card_data"]["Stroke_Time"])
+ # timestamp = time.mktime(time.strptime(dateTime,'%Y-%m-%dT%H:%M:%S.%fZ'))
+ reg = "(\d{4})-(\d{2})-(\d{2}) (\d{2}):(\d{2}):(\d{2})"
+ fd = re.search(reg, dateTime)
+ dt = datetime(int(fd.group(1)), int(fd.group(2)), int(fd.group(3)), int(fd.group(4)), int(fd.group(5)), int(fd.group(6)))
+ timestamp = calendar.timegm(dt.timetuple())
+ card_timestamp = int(time.mktime(dt.timetuple()))
+
+ channels["card_history"]["last_value"] = (data['folder'] + "/" + card)
+ self.sendtodb("card_history", (data['folder'] + "/" + card), card_timestamp)
+ self.process_card(card_data, timestamp, card_timestamp, sendCards=True)
+
+ def getDataLoggerStatus(self):
+ data = json.loads(requests.get(self.device_address + "/json/pythonstatus/").text)
+ al_status = "Not OK"
+ if data['status']['alarmLogger']:
+ al_status = "OK"
+
+ if al_status != self.al_status_last:
+ self.sendtodb("alarmlogger_status", al_status, 0)
+ self.al_status_last = al_status
+
+ dl_status = "Not OK"
+ if data['status']['dataLogger']:
+ dl_status = "OK"
+ if al_status != self.dl_status_last:
+ self.sendtodb("datalogger_status", dl_status, 0)
+ self.dl_status_last = dl_status
+
    def poc_get_card(self, name, value):
        # Meshify command hook: fetch and upload the card identified by
        # *value* ("folder/file").  NOTE(review): self.getcard is not defined
        # in this file — confirm it exists on deviceBase.
        self.getcard(value)
+
    def poc_sync(self, name, value):
        # Meshify command hook: acknowledge connectivity (timestamp 0 = "now").
        self.sendtodb("connected", "true", 0)
        return True
+
    def poc_set_address(self, name, value):
        # Meshify command hook: repoint the driver at a new device base URL.
        # Takes effect on the next poll pass; the URL is not validated here.
        self.device_address = value
        return True
+
    def poc_refresh_data(self, name, value):
        # Meshify command hook: force the next run() pass to re-upload
        # everything regardless of change detection / rate limits.
        self.forceSend = True
        return True