Fixes POCloud driver, increments version, adds Modbus map
100
POCloud/.vscode/.ropeproject/config.py
vendored
Normal file
@@ -0,0 +1,100 @@
# The default ``config.py``
# flake8: noqa


def set_prefs(prefs):
    """This function is called before opening the project"""

    # Specify which files and folders to ignore in the project.
    # Changes to ignored resources are not added to the history and
    # VCSs. Also they are not returned in `Project.get_files()`.
    # Note that ``?`` and ``*`` match all characters but slashes.
    # '*.pyc': matches 'test.pyc' and 'pkg/test.pyc'
    # 'mod*.pyc': matches 'test/mod1.pyc' but not 'mod/1.pyc'
    # '.svn': matches 'pkg/.svn' and all of its children
    # 'build/*.o': matches 'build/lib.o' but not 'build/sub/lib.o'
    # 'build//*.o': matches 'build/lib.o' and 'build/sub/lib.o'
    prefs['ignored_resources'] = ['*.pyc', '*~', '.ropeproject',
                                  '.hg', '.svn', '_svn', '.git', '.tox']

    # Specifies which files should be considered python files. It is
    # useful when you have scripts inside your project. Only files
    # ending with ``.py`` are considered to be python files by
    # default.
    #prefs['python_files'] = ['*.py']

    # Custom source folders: By default rope searches the project
    # for finding source folders (folders that should be searched
    # for finding modules). You can add paths to that list. Note
    # that rope guesses project source folders correctly most of the
    # time; use this if you have any problems.
    # The folders should be relative to project root and use '/' for
    # separating folders regardless of the platform rope is running on.
    # 'src/my_source_folder' for instance.
    #prefs.add('source_folders', 'src')

    # You can extend python path for looking up modules
    #prefs.add('python_path', '~/python/')

    # Should rope save object information or not.
    prefs['save_objectdb'] = True
    prefs['compress_objectdb'] = False

    # If `True`, rope analyzes each module when it is being saved.
    prefs['automatic_soa'] = True
    # The depth of calls to follow in static object analysis
    prefs['soa_followed_calls'] = 0

    # If `False` when running modules or unit tests "dynamic object
    # analysis" is turned off. This makes them much faster.
    prefs['perform_doa'] = True

    # Rope can check the validity of its object DB when running.
    prefs['validate_objectdb'] = True

    # How many undos to hold?
    prefs['max_history_items'] = 32

    # Shows whether to save history across sessions.
    prefs['save_history'] = True
    prefs['compress_history'] = False

    # Set the number spaces used for indenting. According to
    # :PEP:`8`, it is best to use 4 spaces. Since most of rope's
    # unit-tests use 4 spaces it is more reliable, too.
    prefs['indent_size'] = 4

    # Builtin and c-extension modules that are allowed to be imported
    # and inspected by rope.
    prefs['extension_modules'] = []

    # Add all standard c-extensions to extension_modules list.
    prefs['import_dynload_stdmods'] = True

    # If `True` modules with syntax errors are considered to be empty.
    # The default value is `False`; When `False` syntax errors raise
    # `rope.base.exceptions.ModuleSyntaxError` exception.
    prefs['ignore_syntax_errors'] = False

    # If `True`, rope ignores unresolvable imports. Otherwise, they
    # appear in the importing namespace.
    prefs['ignore_bad_imports'] = False

    # If `True`, rope will insert new module imports as
    # `from <package> import <module>` by default.
    prefs['prefer_module_from_imports'] = False

    # If `True`, rope will transform a comma list of imports into
    # multiple separate import statements when organizing
    # imports.
    prefs['split_imports'] = False

    # If `True`, rope will sort imports alphabetically by module name
    # instead of alphabetically by import statement, with from imports
    # after normal imports.
    prefs['sort_imports_alphabetically'] = False


def project_opened(project):
    """This function is called after opening the project"""
    # Do whatever you like here!
BIN
POCloud/.vscode/.ropeproject/objectdb
vendored
Normal file
Binary file not shown.
12
POCloud/config.txt
Normal file
@@ -0,0 +1,12 @@

{
"driverFileName":"poc.py",
"deviceName":"poc",
"driverId":"0050",
"releaseVersion":"5",
"files": {
"file1":"poc.py",
"file2":"modbusMap.p"}

}
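config.txt is plain JSON, so a loader can read it directly with json.load; a minimal sketch (the load_driver_config helper below is hypothetical, not part of this commit):

import json

def load_driver_config(path='config.txt'):
    # e.g. cfg['driverFileName'] == 'poc.py', cfg['releaseVersion'] == '5'
    with open(path, 'rb') as handle:
        return json.load(handle)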
4318
POCloud/modbusMap.p
Normal file
File diff suppressed because it is too large
662
POCloud/poc.py
@@ -1,95 +1,115 @@
#!/usr/bin/python

import traceback

import threading
import time
import os
from device_base import deviceBase
from datetime import datetime
import requests
import json
import calendar
import pickle
from dateutil import tz
from requests.packages.urllib3.exceptions import InsecureRequestWarning
from requests.packages.urllib3.exceptions import InsecurePlatformWarning
from device_base import deviceBase
import traceback
from pycomm.ab_comm.clx import Driver as ClxDriver
from collections import deque

requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
requests.packages.urllib3.disable_warnings(InsecurePlatformWarning)
PLC_IP_ADDRESS = '192.168.1.10'

API_HTTP_TYPE = "https"
API_DEVICE_ADDRESS = "192.168.1.30"
API_DEVICE_PORT = 5000

API_BASE_URL = "{}://{}:{}".format(API_HTTP_TYPE, API_DEVICE_ADDRESS, API_DEVICE_PORT)

def read_tag(addr, tag):
    c = ClxDriver()
    if c.open(addr):
        try:
            v = c.read_tag(tag)
            # print(v)
            return v
        except Exception:
            err = c.get_status()
            c.close()
            print err
            pass
    c.close()

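# Hedged usage sketch (commented out; the tag is one this driver actually
# polls). pycomm's read_tag returns a (value, data_type) pair for an atomic tag:
#
#     result = read_tag(PLC_IP_ADDRESS, "Card_Past[1].ID")
#     if result:
#         print "card ID {0} (type {1})".format(result[0], result[1])
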
go_channels = {
    'spm_average': {'channel': 'go_average_spm', 'last_value_sent': None},
    'downhole_gross_stroke_average': {'channel': 'go_downhole_gross_stroke', 'last_value_sent': None},  # TODO: ADD
    'downhole_net_stroke_average': {'channel': 'go_downhole_net_stroke', 'last_value_sent': None},  # TODO: ADD
    'electricity_cost_total': {'channel': 'go_electricity_cost', 'last_value_sent': None},
    'fluid_level_average': {'channel': 'go_fluid_above_pump', 'last_value_sent': None},
    'inflow_rate_average': {'channel': 'go_inflow_rate', 'last_value_sent': None},
    'kWh_used_total': {'channel': 'go_kwh', 'last_value_sent': None},
    'kWh_regen_total': {'channel': 'go_kwh_regen', 'last_value_sent': None},
    'lifting_cost_average': {'channel': 'go_lifting_cost', 'last_value_sent': None},
    'peak_pr_load': {'channel': 'go_peak_load', 'last_value_sent': None},
    'min_pr_load': {'channel': 'go_min_load', 'last_value_sent': None},
    'percent_run': {'channel': 'go_percent_run', 'last_value_sent': None},
    'polished_rod_hp_average': {'channel': 'go_polished_rod_hp', 'last_value_sent': None},
    'pump_hp_average': {'channel': 'go_pump_hp', 'last_value_sent': None},  # TODO: ADD
    'production_total': {'channel': 'go_production_calculated', 'last_value_sent': None},
    'pump_intake_pressure_average': {'channel': 'go_pump_intake_pressure', 'last_value_sent': None},
    'surface_stroke_length_average': {'channel': 'go_surface_stroke_length', 'last_value_sent': None},  # TODO: ADD
    'tubing_movement_average': {'channel': "go_tubing_movement", 'last_value_sent': None},  # TODO: ADD
}

tag_channels = {
    'Polished Rod HP': {'channel': 'polished_rod_hp', 'last_value_sent': None},
    'Peak Downhole Load': {'channel': 'downhole_peak_load', 'last_value_sent': None},  # TODO: ADD
    'Gross Stroke Length': {'channel': 'downhole_gross_stroke', 'last_value_sent': None},
    'Stroke Speed': {'channel': 'SPM', 'last_value_sent': None},
    'Tubing Head Pressure': {'channel': 'tubing_head_pressure', 'last_value_sent': None},
    'Minimum Polished Rod Load': {'channel': 'surface_min_load', 'last_value_sent': None},
    'Fluid Load': {'channel': 'downhole_fluid_load', 'last_value_sent': None},
    'Downhole Max. Position': {'channel': 'downhole_max_position', 'last_value_sent': None},
    'Downhole Net Stroke': {'channel': 'downhole_net_stroke', 'last_value_sent': None},
    'Pump Fill Percent': {'channel': 'fillage_percent', 'last_value_sent': None},
    'Downhole Pump HP': {'channel': 'pump_hp', 'last_value_sent': None},
    'Surface Min. Position': {'channel': 'surface_min_position', 'last_value_sent': None},  # TODO: ADD
    'Pump Intake Pressure': {'channel': 'pump_intake_pressure', 'last_value_sent': None},
    'Surface Max. Position': {'channel': 'surface_max_position', 'last_value_sent': None},  # TODO: ADD
    'Tubing Movement': {'channel': 'tubing_movement', 'last_value_sent': None},
    'Downhole Min. Position': {'channel': 'downhole_min_position', 'last_value_sent': None},
    'Peak Polished Rod Load': {'channel': 'surface_max_load', 'last_value_sent': None},
    'Minimum Downhole Load': {'channel': 'downhole_min_load', 'last_value_sent': None},  # TODO: ADD
    'Surface Stroke Length': {'channel': 'surface_stroke_length', 'last_value_sent': None},
    'Downhole Adjusted Gross Stroke': {'channel': 'downhole_adjusted_gross_stroke', 'last_value_sent': None},
    'Fluid Level': {'channel': 'fluid_above_pump', 'last_value_sent': None},
    'Stroke Production': {'channel': 'stroke_production', 'last_value_sent': None}
}

def read_array(addr, tag, start, end):
    c = ClxDriver()
    if c.open(addr):
        arr_vals = []
        try:
            for i in range(start, end):
                tag_w_index = tag + "[{}]".format(i)
                v = c.read_tag(tag_w_index)
                # print('{} - {}'.format(tag_w_index, v))
                arr_vals.append(round(v[0], 4))
                # print(v)
            if arr_vals:
                return arr_vals
            else:
                print("No length for {}".format(addr))
                return False
        except Exception:
            err = c.get_status()
            c.close()
            print err
            pass
    c.close()

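# Hedged usage sketch for read_array (commented out): pull one stored card's
# surface positions, sizing the range from the Num_Points tag as readCard
# below does.
#
#     num_points = int(read_tag(PLC_IP_ADDRESS, "Card_Past[1].Num_Points")[0])
#     surf_pos = read_array(PLC_IP_ADDRESS, 'Card_Past[1].Surface_Position', 1, num_points)
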
dt_channels = {  # Current Daily Totals
    'Electricity Cost': {'channel': 'dt_electricity_cost', 'last_value_sent': None, 'change_threshold': 1.5, 'last_send_ts': 0},
    'Inflow Rate': {'channel': 'dt_inflow_rate', 'last_value_sent': None, 'change_threshold': 1.5, 'last_send_ts': 0},
    'Energy Regen': {'channel': 'dt_kWh_regen', 'last_value_sent': None, 'change_threshold': 1.5, 'last_send_ts': 0},
    'Min Load': {'channel': 'dt_min_load', 'last_value_sent': None, 'change_threshold': 1.5, 'last_send_ts': 0},
    'Polished Rod HP': {'channel': 'dt_polished_rod_hp', 'last_value_sent': None, 'change_threshold': 1.5, 'last_send_ts': 0},
    'Calculated Production': {'channel': 'dt_calculated_production', 'last_value_sent': None, 'change_threshold': 1.5, 'last_send_ts': 0},
    'Projected Production': {'channel': 'dt_projected_production', 'last_value_sent': None, 'change_threshold': 1.5, 'last_send_ts': 0},
    'Pump HP': {'channel': 'dt_pump_hp', 'last_value_sent': None, 'change_threshold': 1.5, 'last_send_ts': 0},
    'Pump Intake Presure': {'channel': 'dt_pump_intake_pressure', 'last_value_sent': None, 'change_threshold': 1.5, 'last_send_ts': 0},
    'Surface Stroke Length': {'channel': 'dt_surface_stroke_length', 'last_value_sent': None, 'change_threshold': 1.5, 'last_send_ts': 0},
    'Tubing Movement': {'channel': 'dt_tubing_movement', 'last_value_sent': None, 'change_threshold': 1.5, 'last_send_ts': 0},
    'Downhole Net Stroke': {'channel': 'dt_downhole_net_stroke', 'last_value_sent': None, 'change_threshold': 1.5, 'last_send_ts': 0},
    'Average SPM': {'channel': 'dt_average_spm', 'last_value_sent': None, 'change_threshold': 1.5, 'last_send_ts': 0},
    'Peak Load': {'channel': 'dt_peak_load', 'last_value_sent': None, 'change_threshold': 1.5, 'last_send_ts': 0},
    'kWh': {'channel': 'dt_kWh', 'last_value_sent': None, 'change_threshold': 1.5, 'last_send_ts': 0},
    'Percent Run': {'channel': 'dt_percent_run', 'last_value_sent': None, 'change_threshold': 1.5, 'last_send_ts': 0},
    'Fluid Level': {'channel': 'dt_fluid_level', 'last_value_sent': None, 'change_threshold': 1.5, 'last_send_ts': 0},
    'Lifting Cost': {'channel': 'dt_lifting_cost', 'last_value_sent': None, 'change_threshold': 1.5, 'last_send_ts': 0},
    'Full Card Production': {'channel': 'dt_full_card_production', 'last_value_sent': None, 'change_threshold': 1.5, 'last_send_ts': 0},
}

def write_tag(addr, tag, val):
    c = ClxDriver()
    if c.open(addr):
        try:
            # typ = getTagType(addr, tag)
            cv = c.read_tag(tag)
            wt = c.write_tag(tag, val, cv[1])
            # print(wt)
            return wt
        except Exception:
            err = c.get_status()
            c.close()
            print err
    c.close()


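# Hedged round-trip sketch (commented out; "Example_Setpoint" is an invented
# tag name). write_tag reuses the tag's current pycomm type (cv[1]) so the
# write matches the type already on the PLC:
#
#     if write_tag(PLC_IP_ADDRESS, "Example_Setpoint", 42.0):
#         print read_tag(PLC_IP_ADDRESS, "Example_Setpoint")[0]
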
class Card():
    global PLC_IP_ADDRESS

    def __init__(self, unified_time):
        self.sc = []
        self.dc = []
        self.sent = False
        self.read_time = unified_time
        self.readCard()

    def readCard(self):
        self.card_id = read_tag(PLC_IP_ADDRESS, "Card_Past[1].ID")[0]
        self.num_points = int(read_tag(PLC_IP_ADDRESS, "Card_Past[1].Num_Points")[0])
        print("reading {} from card ID {}".format(self.num_points, self.card_id))

        if self.num_points > 1:

            surf_pos = read_array(PLC_IP_ADDRESS, 'Card_Past[1].Surface_Position', 1, self.num_points)
            surf_lod = read_array(PLC_IP_ADDRESS, 'Card_Past[1].Surface_Load', 1, self.num_points)
            down_pos = read_array(PLC_IP_ADDRESS, 'Card_Past[1].Downhole_Position', 1, self.num_points)
            down_lod = read_array(PLC_IP_ADDRESS, 'Card_Past[1].Downhole_Load', 1, self.num_points)
            if surf_pos and surf_lod and down_pos and down_lod:
                for i in range(0, self.num_points-1):
                    if not (surf_pos[i] == 0.0) and not (surf_lod[i] == 0.0):
                        self.sc.append([surf_pos[i], surf_lod[i]])
                    if not (down_pos[i] == 0.0) and not (down_lod[i] == 0.0):
                        self.dc.append([down_pos[i], down_lod[i]])
                return True
            else:
                print("couldn't get a full set of position/load pairs")
                return False

    def stringify(self):
        ''' returns a list of two strings [surface card, downhole card]'''
        sc_str = "["
        dc_str = "["
        for i in range(0, len(self.sc)):
            sc_str = sc_str + "[{},{}],".format(self.sc[i][0], self.sc[i][1])
        sc_str = sc_str + "[{},{}]]".format(self.sc[0][0], self.sc[0][1])
        for j in range(0, len(self.dc)):
            dc_str = dc_str + "[{},{}],".format(self.dc[j][0], self.dc[j][1])
        dc_str = dc_str + "[{},{}]]".format(self.dc[0][0], self.dc[0][1])
        return [sc_str, dc_str]


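# For clarity, a hedged example of what stringify() returns (values invented):
# for self.sc == [[0.0, 5.1], [12.3, 7.8]] the surface string is
#     "[[0.0,5.1],[12.3,7.8],[0.0,5.1]]"
# i.e. a JSON-style nested list, closed by repeating the first point so the
# card traces plot as closed loops.
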
class start(threading.Thread, deviceBase):
@@ -99,28 +119,17 @@ class start(threading.Thread, deviceBase):
        deviceBase.__init__(self, name=name, number=number, mac=mac, Q=Q, mcu=mcu, companyId=companyId, offset=offset, mqtt=mqtt, Nodes=Nodes)

        self.daemon = True
        self.forceSend = True
        self.version = "3"
        self.device_address = "http://192.168.1.30"
        # self.device_address = "http://localhost"
        self.forceSend = False
        self.version = "4"
        self.device_address = "http://192.168.1.30/"
        self.cardLoopTimer = 600
        self.finished = threading.Event()
        threading.Thread.start(self)
        self.status_changed = False

        self.last_card_send_time = 0
        self.statusChanged = False
        self.al_status_last = False
        self.dl_status_last = False

        # load stored Run Status ID's
        try:
            with open('runstatusIds.p', 'rb') as handle:
                self.runstatusIds = pickle.load(handle)

            print "found pickled Run Status ID dictionary: {0}".format(self.runstatusIds)
        except:
            print "couldn't load Run Status ID's from pickle"
            self.runstatusIds = []
        self.card_storage_limit = 5
        self.last_card_sent_time = 0

        # load stored event ID's
        try:
@@ -129,430 +138,85 @@ class start(threading.Thread, deviceBase):

            print "found pickled eventID dictionary: {0}".format(self.eventIds)
        except:
            print "couldn't load event ID's from pickle"
            self.eventIds = []

        # load stored Well Test ID's
        # load stored wellconfig's
        try:
            with open('welltestIDs.p', 'rb') as handle:
                self.welltestIDs = pickle.load(handle)
            with open('wellSetup.p', 'rb') as handle:
                self.wellSetup = pickle.load(handle)

            print "found pickled welltestIDs dictionary: {0}".format(self.welltestIDs)
            print "Found pickled Well Setup (but it's going to be too long to print)"
            # print self.wellConfig
        except:
            print "couldn't load well test ID's from pickle"
            self.welltestIDs = []
            print "couldn't load Well Setup from pickle"
            self.wellSetup = []

        # load stored Gauge Off ID's
        try:
            with open('gaugeoffIds.p', 'rb') as handle:
                self.gaugeoffIds = pickle.load(handle)

            print "found pickled gaugeoffIds dictionary: {0}".format(self.gaugeoffIds)
        except:
            print "couldn't load gauge off ID's from pickle"
            self.gaugeoffIds = []

        # load stored Fluid Shot ID's
        try:
            with open('fluidshotIds.p', 'rb') as handle:
                self.fluidshotIds = pickle.load(handle)

            print "found pickled fluidshotIDs dictionary: {0}".format(self.fluidshotIds)
        except:
            print "couldn't load fluid shot ID's from pickle"
            self.fluidshotIds = []

        # load stored note ID's
        try:
            with open('noteIDs.p', 'rb') as handle:
                self.noteIDs = pickle.load(handle)

            print "found pickled noteID dictionary: {0}".format(self.noteIDs)
        except:
            print "couldn't load note ID's from pickle"
            self.noteIDs = []

        # load stored last_card_id
        try:
            with open('last_card_id.p', 'rb') as handle:
                self.last_card_id = pickle.load(handle)

            print "found pickled last_card_id: {0}".format(self.last_card_id)
        except:
            print "couldn't load last_card_id from pickle"
            self.last_card_id = 0
        self.sendtodbJSON("device_address", self.device_address, 0)

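    # A hedged aside (not part of this commit): the pickle loads above all
    # repeat one pattern and could collapse into a hypothetical helper, e.g.
    #
    #     def _load_cached(self, path, default):
    #         try:
    #             with open(path, 'rb') as handle:
    #                 value = pickle.load(handle)
    #             print "found pickled {0}: {1}".format(path, value)
    #             return value
    #         except:
    #             print "couldn't load {0} from pickle".format(path)
    #             return default
    #
    #     # e.g. self.noteIDs = self._load_cached('noteIDs.p', [])
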
    # this is a required function for all drivers; its goal is to upload some piece of data
    # about your device so it can be seen on the web
    def register(self):
        channels["status"]["last_value"] = ""
        pass

    def run(self):
        self.last_status = ""
        runLoopStatus = "Startup"
        self.runLoopStatus = ""
        card_storage = deque([])  # array of the last x cards
        while True:
            if self.forceSend:
                print "FORCE SEND: TRUE"
            try:
                if self.forceSend:
                    print("!!!!!!!!!!!!!!! FORCE SEND !!!!!!!!!!!!!!!")
                self.statusChanged = False

                runLoopStatus = "checkStatus"
                chk_status = self.checkStatus(self.last_status)
                if chk_status:
                    self.last_status = chk_status
                    self.status_changed = True

                runLoopStatus = "checkEvents"
                self.checkEvents()

                runLoopStatus = "checkNotes"
                self.checkNotes()

                runLoopStatus = "checkWellTests"
                self.checkWellTests()

                runLoopStatus = "checkFluidShots"
                self.checkFluidShots()

                runLoopStatus = "checkDailyTotals"
                self.checkDailyTotals()

                runLoopStatus = "checkGaugeOffData"
                self.checkGaugeOffData()

                runLoopStatus = "checkStoredValues"
                self.checkStoredValues()

                # runLoopStatus = "getDataLoggerStatus()"
                # self.getDataLoggerStatus()

                if self.status_changed:
                    runLoopStatus = "getLatestXCards"
                    self.forceSend = True
                    self.checkLatestCard(numCards=5)
                runLoopStatus = "Reading Cards"
                if len(card_storage) > 0:
                    if not read_tag(PLC_IP_ADDRESS, "Card_Past[1].ID")[0] == card_storage[0].card_id:
                        current_time = time.time()
                        current_card = Card(current_time)
                        self.sendtodbJSON("card_history", current_card.card_id, current_time)
                        if (current_card.read_time - self.last_card_sent_time) > self.cardLoopTimer or self.forceSend:
                            cards = current_card.stringify()
                            # self.sendtodbJSON("sc", cards[0], current_time)
                            # self.sendtodbJSON("dc", cards[1], current_time)
                            self.sendtodbDev(1, 'sc', cards[0], current_time, "poc")
                            self.sendtodbDev(1, 'dc', cards[1], current_time, "poc")
                            self.last_card_sent_time = time.time()
                            current_card.sent = True
                        card_storage.appendleft(current_card)
                        while len(card_storage) > self.card_storage_limit:
                            card_storage.pop()
                    if self.statusChanged:
                        for c in card_storage:
                            if not c.sent:
                                cstr = c.stringify()
                                # self.sendtodbJSON("sc", cstr[0], c.read_time)
                                # self.sendtodbJSON("dc", cstr[1], c.read_time)
                                self.sendtodbDev(1, 'sc', cstr[0], c.read_time, "poc")
                                self.sendtodbDev(1, 'dc', cstr[1], c.read_time, "poc")
                                self.last_card_sent_time = time.time()
                else:
                    runLoopStatus = "checkLatestCard"
                    self.checkLatestCard()

                    # if self.forceSend or (checkBackupSkipped > checkBackupEvery):
                    #     runLoopStatus = "checkBackup"
                    #     self.checkBackup()
                    #     checkBackupSkipped = 0
                    # checkBackupSkipped = checkBackupSkipped + 1

                    current_time = time.time()
                    current_card = Card(current_time)
                    self.sendtodbJSON("card_history", current_card.card_id, current_time)
                    if (current_card.read_time - self.last_card_sent_time) > self.cardLoopTimer or self.forceSend:
                        cards = current_card.stringify()
                        # self.sendtodbJSON("sc", cards[0], current_time)
                        # self.sendtodbJSON("dc", cards[1], current_time)
                        self.sendtodbDev(1, 'sc', cards[0], current_time, "poc")
                        self.sendtodbDev(1, 'dc', cards[1], current_time, "poc")
                        self.last_card_sent_time = time.time()
                        current_card.sent = True
                    card_storage.appendleft(current_card)
                runLoopStatus = "Complete"
                time.sleep(10)
                time.sleep(3)
                self.forceSend = False
            except Exception, e:
                sleep_timer = 20
                print "Error during {0} of run loop: {1}\nWill try again in {2} seconds...".format(runLoopStatus, e, sleep_timer)
                print("Error during {0} of run loop: {1}\nWill try again in {2} seconds...".format(runLoopStatus, e, sleep_timer))
                traceback.print_exc()
                time.sleep(sleep_timer)

    def checkStatus(self, last_status):
        global API_BASE_URL
        try:
            url = API_BASE_URL + '/api/run_status_log?q={"order_by":[{"field":"created_on","direction":"desc"}]}'
            api_req = requests.get(url, verify=False)
            if api_req.status_code == 200:
                req_data = json.loads(api_req.text)
                req_data['objects'].reverse()
                for i in range(0, len(req_data['objects'])):
                    if int(req_data['objects'][i]['_id']) not in self.runstatusIds:
                        new_status = req_data['objects'][i]["run_status"]
                        timestamp = calendar.timegm(datetime.strptime(req_data['objects'][i]['created_on'], '%Y-%m-%dT%H:%M:%S.%f').timetuple())
                        self.sendtodb('status', new_status, timestamp)
                        self.runstatusIds.append(int(req_data['objects'][i]['_id']))
                        if len(self.runstatusIds) > 20:
                            del self.runstatusIds[0]
                        with open('runstatusIds.p', 'wb') as handle:
                            pickle.dump(self.runstatusIds, handle)

                if req_data['objects'][-1:][0]['run_status'] != last_status:
                    print "Status has changed from {0} to {1} @ {2}".format(last_status, req_data['objects'][-1:][0]['run_status'], req_data['objects'][-1:][0]['created_on'])
                    return req_data['objects'][-1:][0]['run_status']
        except Exception as e:
            print "Error during checkStatus..."
            print("++++++== TRACEBACK ==++++++")
            traceback.print_exc()
            return False

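    # A hedged aside on the timestamp handling above: calendar.timegm reads the
    # parsed struct_time as UTC, so 'created_on' is assumed to be a UTC ISO-8601
    # string. For example (microseconds are dropped by timetuple()):
    #
    #     calendar.timegm(datetime.strptime('2016-11-23T00:37:02.806026',
    #                                       '%Y-%m-%dT%H:%M:%S.%f').timetuple())
    #     # -> 1479861422
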
    def checkEvents(self):
        global API_BASE_URL
        try:
            url = API_BASE_URL + '/api/events?q={"order_by":[{"field":"created_on","direction":"desc"}]}'
            api_req = requests.get(url, verify=False)
            if api_req.status_code == 200:
                req_data = json.loads(api_req.text)
                req_data['objects'].reverse()
                for i in range(0, len(req_data['objects'])):
                    if int(req_data['objects'][i]['_id']) not in self.eventIds:
                        timestamp = calendar.timegm(datetime.strptime(req_data['objects'][i]['created_on'], '%Y-%m-%dT%H:%M:%S.%f').timetuple())
                        self.sendtodbJSON('events', json.dumps(req_data['objects'][i]), timestamp)
                        self.eventIds.append(int(req_data['objects'][i]['_id']))
                        if len(self.eventIds) > 20:
                            del self.eventIds[0]
                        with open('eventIds.p', 'wb') as handle:
                            pickle.dump(self.eventIds, handle)
            return True
        except Exception as e:
            print "Error during checkEvents..."
            print("++++++== TRACEBACK ==++++++")
            traceback.print_exc()
            return False

    def checkNotes(self):
        global API_BASE_URL
        try:
            url = API_BASE_URL + '/api/notes?q={"order_by":[{"field":"created_on","direction":"desc"}]}'
            api_req = requests.get(url, verify=False)
            if api_req.status_code == 200:
                req_data = json.loads(api_req.text)
                req_data['objects'].reverse()
                for i in range(0, len(req_data['objects'])):
                    if int(req_data['objects'][i]['_id']) not in self.noteIDs:
                        timestamp = calendar.timegm(datetime.strptime(req_data['objects'][i]['created_on'], '%Y-%m-%dT%H:%M:%S').timetuple())
                        self.sendtodbJSON('notes', json.dumps(req_data['objects'][i]), timestamp)
                        self.noteIDs.append(int(req_data['objects'][i]['_id']))
                        if len(self.noteIDs) > 20:
                            del self.noteIDs[0]
                        with open('noteIDs.p', 'wb') as handle:
                            pickle.dump(self.noteIDs, handle)
            return True
        except Exception as e:
            print "Error during checkNotes..."
            print("++++++== TRACEBACK ==++++++")
            traceback.print_exc()
            return False

    def checkFluidShots(self):
        global API_BASE_URL
        try:
            url = API_BASE_URL + '/api/fluid_shots?q={"order_by":[{"field":"created_on","direction":"desc"}]}'
            api_req = requests.get(url, verify=False)
            if api_req.status_code == 200:
                req_data = json.loads(api_req.text)
                req_data['objects'].reverse()
                for i in range(0, len(req_data['objects'])):
                    if int(req_data['objects'][i]['_id']) not in self.fluidshotIds:
                        timestamp = calendar.timegm(datetime.strptime(req_data['objects'][i]['created_on'], '%Y-%m-%dT%H:%M:%S').timetuple())
                        self.sendtodbJSON('fluidshots', json.dumps(req_data['objects'][i]), timestamp)
                        self.fluidshotIds.append(int(req_data['objects'][i]['_id']))
                        if len(self.fluidshotIds) > 20:
                            del self.fluidshotIds[0]
                        with open('fluidshotIds.p', 'wb') as handle:
                            pickle.dump(self.fluidshotIds, handle)
            return True
        except Exception as e:
            print "Error during checkFluidShots..."
            print("++++++== TRACEBACK ==++++++")
            traceback.print_exc()
            return False

    def checkWellTests(self):
        global API_BASE_URL
        try:
            url = API_BASE_URL + '/api/well_test?q={"order_by":[{"field":"created_on","direction":"desc"}]}'
            api_req = requests.get(url, verify=False)
            if api_req.status_code == 200:
                req_data = json.loads(api_req.text)
                req_data['objects'].reverse()
                for i in range(0, len(req_data['objects'])):
                    if int(req_data['objects'][i]['_id']) not in self.welltestIDs:
                        timestamp = calendar.timegm(datetime.strptime(req_data['objects'][i]['created_on'], '%Y-%m-%dT%H:%M:%S').timetuple())
                        self.sendtodbJSON('welltests', json.dumps(req_data['objects'][i]), timestamp)
                        self.welltestIDs.append(int(req_data['objects'][i]['_id']))
                        if len(self.welltestIDs) > 20:
                            del self.welltestIDs[0]
                        with open('welltestIDs.p', 'wb') as handle:
                            pickle.dump(self.welltestIDs, handle)
            return True
        except Exception as e:
            print "Error during checkWellTests..."
            print("++++++== TRACEBACK ==++++++")
            traceback.print_exc()
            return False

    def checkDailyTotals(self):
        global API_BASE_URL, dt_channels
        try:
            api_req = requests.get("{}/api/today_totals".format(API_BASE_URL), verify=False)
            if api_req.status_code == 200:
                req_data = json.loads(api_req.text)
                for i in range(0, len(req_data)):
                    if req_data[i]['name'] in dt_channels:
                        if dt_channels[req_data[i]['name']]['last_value_sent'] is None:
                            self.sendtodb(dt_channels[req_data[i]['name']]['channel'], req_data[i]['value'], 0)
                            dt_channels[req_data[i]['name']]['last_value_sent'] = req_data[i]['value']
                            dt_channels[req_data[i]['name']]['last_send_ts'] = time.time()
                        elif abs(dt_channels[req_data[i]['name']]['last_value_sent'] - req_data[i]['value']) > dt_channels[req_data[i]['name']]['change_threshold']:
                            self.sendtodb(dt_channels[req_data[i]['name']]['channel'], req_data[i]['value'], 0)
                            dt_channels[req_data[i]['name']]['last_value_sent'] = req_data[i]['value']
                            dt_channels[req_data[i]['name']]['last_send_ts'] = time.time()
                        elif time.time() - dt_channels[req_data[i]['name']]['last_send_ts'] > 3600:  # Send values every hour
                            self.sendtodb(dt_channels[req_data[i]['name']]['channel'], req_data[i]['value'], 0)
                            dt_channels[req_data[i]['name']]['last_value_sent'] = req_data[i]['value']
                            dt_channels[req_data[i]['name']]['last_send_ts'] = time.time()
        except Exception as e:
            print "Error during checkDailyTotals..."
            print("++++++== TRACEBACK ==++++++")
            traceback.print_exc()

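    # A hedged restatement (hypothetical helper, not in this commit) of the send
    # policy used by the three branches above: send on first sight, on a change
    # bigger than change_threshold, or when the hourly heartbeat is due.
    #
    #     def _should_send(entry, value, now):
    #         return (entry['last_value_sent'] is None
    #                 or abs(entry['last_value_sent'] - value) > entry['change_threshold']
    #                 or now - entry['last_send_ts'] > 3600)
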
    def checkGaugeOffData(self):
        global API_BASE_URL, go_channels
        try:
            url = API_BASE_URL + '/api/gauge_off?q={"order_by":[{"field":"created_on","direction":"desc"}]}'
            api_req = requests.get(url, verify=False)
            if api_req.status_code == 200:
                req_data = json.loads(api_req.text)
                req_data['objects'].reverse()
                for i in range(0, len(req_data['objects'])):
                    if int(req_data['objects'][i]['_id']) not in self.gaugeoffIds:
                        timestamp = calendar.timegm(datetime.strptime(req_data['objects'][i]['created_on'], '%Y-%m-%dT%H:%M:%S.%f').timetuple())
                        for col_name in req_data['objects'][i]:
                            if col_name in go_channels:
                                self.sendtodb(go_channels[col_name]['channel'], req_data['objects'][i][col_name], timestamp)

                        self.gaugeoffIds.append(int(req_data['objects'][i]['_id']))
                        if len(self.gaugeoffIds) > 20:
                            del self.gaugeoffIds[0]
                        with open('gaugeoffIds.p', 'wb') as handle:
                            pickle.dump(self.gaugeoffIds, handle)
            return True
        except Exception as e:
            print "Error during checkGaugeOffData..."
            print("++++++== TRACEBACK ==++++++")
            traceback.print_exc()
            return False

    def checkStoredValues(self):
        global API_BASE_URL, tag_channels
        try:
            api_req = requests.get('{}/api/latest'.format(API_BASE_URL), verify=False)
            if api_req.status_code == 200:
                req_data = json.loads(api_req.text)
                for i in range(0, len(req_data)):
                    if req_data[i]['tag_name'] in tag_channels:
                        if tag_channels[req_data[i]['tag_name']]['last_value_sent'] is None:
                            self.sendtodb(tag_channels[req_data[i]['tag_name']]['channel'], req_data[i]['value'], calendar.timegm(datetime.strptime(req_data[i]['datetime'], '%Y-%m-%d %H:%M:%S.%f').timetuple()))
                            tag_channels[req_data[i]['tag_name']]['last_value_sent'] = req_data[i]['value']
                        elif req_data[i]['value'] != tag_channels[req_data[i]['tag_name']]['last_value_sent']:
                            self.sendtodb(tag_channels[req_data[i]['tag_name']]['channel'], req_data[i]['value'], calendar.timegm(datetime.strptime(req_data[i]['datetime'], '%Y-%m-%d %H:%M:%S.%f').timetuple()))
                            tag_channels[req_data[i]['tag_name']]['last_value_sent'] = req_data[i]['value']
        except Exception as e:
            print "Error during checkStoredValues..."
            print("++++++== TRACEBACK ==++++++")
            traceback.print_exc()


    def checkLatestCard(self, numCards=1):
        global API_BASE_URL
        try:
            url = API_BASE_URL + '/api/cards?q={"order_by":[{"field":"created_on","direction":"desc"}], "limit":' + str(numCards) + "}"
            api_req = requests.get(url, verify=False)
            req_data = json.loads(api_req.text)['objects']

            utc_tz = tz.tzutc()
            local_tz = tz.tzlocal()

            # check the card to see if it's new
            # 1. if it's new, send the folder/file_name to the card_history channel
            # 2. if it's new and it's been 10 minutes since you last sent an entire card, then send up all of the data
            for i in range(0, len(req_data)):
                current_card = req_data[i]
                if current_card['_id'] > self.last_card_id:
                    # 2016-11-23T00:37:02.806026
                    dt = datetime.strptime(current_card['created_on'], '%Y-%m-%dT%H:%M:%S.%f')
                    dt_utc = dt.replace(tzinfo=utc_tz)
                    dt_local = dt_utc.astimezone(tz.tzlocal())

                    timestamp_utc = calendar.timegm(dt_utc.timetuple())
                    timestamp_local = calendar.timegm(dt_local.timetuple())

                    print "New card detected @ {0}".format(datetime.strftime(dt_local, "%Y-%m-%dT%H:%M:%S.%f"))
                    # set the last value equal to the current value and upload your data
                    self.sendtodb("card_history", current_card['_id'], timestamp_utc)
                    self.last_card_id = current_card['_id']
                    with open('last_card_id.p', 'wb') as handle:
                        pickle.dump(self.last_card_id, handle)

                    # check the last time the card was updated
                    if (time.time() - self.last_card_send_time) > self.cardLoopTimer or self.status_changed or self.forceSend:
                        # it's been 10 minutes, send the full upload
                        print "Either status has changed or last stored card is too old."
                        self.sendtodb("cardtype", current_card['stroke_type'], int(timestamp_utc))

                        # TODO: FIX CARD PARSING
                        s_p = current_card["surf_pos"].replace("[", "").replace("]", "").split(", ")
                        s_l = current_card["surf_lod"].replace("[", "").replace("]", "").split(", ")
                        d_p = current_card["down_pos"].replace("[", "").replace("]", "").split(", ")
                        d_l = current_card["down_lod"].replace("[", "").replace("]", "").split(", ")
                        newSc = "["
                        newDc = "["

                        for i in range(len(s_p)):
                            try:
                                if s_p[i] is None:
                                    continue
                                if s_p[i] != 0.0 and s_l[i] != 0.0:
                                    newSc += "[" + str(round(float(s_p[i]), 3)) + "," + str(round(float(s_l[i]), 3)) + "],"
                            except:
                                pass
                        newSc = newSc[:-1] + "]"

                        for i in range(len(d_p)):
                            try:
                                if d_p[i] is None:
                                    continue
                                if d_p[i] != 0.0 and d_l[i] != 0.0:
                                    newDc += "[" + str(round(float(d_p[i]), 3)) + "," + str(round(float(d_l[i]), 3)) + "],"
                            except:
                                pass
                        newDc = newDc[:-1] + "]"

                        self.sendtodb("surf_pos", current_card["surf_pos"], timestamp_utc)
                        self.sendtodb("surf_lod", current_card["surf_lod"], timestamp_utc)
                        self.sendtodb("down_pos", current_card["down_pos"], timestamp_utc)
                        self.sendtodb("down_lod", current_card["down_lod"], timestamp_utc)
                        self.sendtodb("sc", newSc, timestamp_utc)
                        self.sendtodb("dc", newDc, timestamp_utc)

        except Exception as e:
            print "Error during checkLatestCard..."
            print("++++++== TRACEBACK ==++++++")
            traceback.print_exc()

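    # A hedged note on the parsing marked "TODO: FIX CARD PARSING" above: the
    # card fields arrive as stringified lists (sample values invented), e.g.
    #
    #     "[0.0, 12.5, 25.1]".replace("[", "").replace("]", "").split(", ")
    #     # -> ['0.0', '12.5', '25.1']
    #
    # The items stay strings, and in Python 2 a str-vs-float compare like
    # s_p[i] != 0.0 is always True, so the zero filter never drops any points.
    # (The inner `for i` loops also shadow the outer card loop's `i`.)
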
    # def getDataLoggerStatus(self):
    #     try:
    #         data = json.loads(requests.get(self.device_address + "/json/pythonstatus/", verify=False).text)
    #         al_status = "Not OK"
    #         if data['status']['alarmLogger']:
    #             al_status = "OK"
    #
    #         if al_status != self.al_status_last:
    #             self.sendtodb("alarmlogger_status", al_status, 0)
    #             self.al_status_last = al_status
    #
    #         dl_status = "Not OK"
    #         if data['status']['dataLogger']:
    #             dl_status = "OK"
    #         if al_status != self.dl_status_last:
    #             self.sendtodb("datalogger_status", dl_status, 0)
    #             self.dl_status_last = dl_status
    #     except Exception, e:
    #         print("getDataLoggerStatus Error: {}".format(e))

    def poc_get_card(self, name, value):
        self.getcard(value)

    def poc_sync(self, name, value):
        self.sendtodb("connected", "true", 0)

567
POCloud/poc_bak.py
Normal file
@@ -0,0 +1,567 @@
#!/usr/bin/python

import traceback
import threading
import time
import os
from device_base import deviceBase
from datetime import datetime
import requests
import json
import calendar
import pickle
from dateutil import tz
from requests.packages.urllib3.exceptions import InsecureRequestWarning
from requests.packages.urllib3.exceptions import InsecurePlatformWarning

requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
requests.packages.urllib3.disable_warnings(InsecurePlatformWarning)

API_HTTP_TYPE = "https"
API_DEVICE_ADDRESS = "192.168.1.30"
API_DEVICE_PORT = 5000

API_BASE_URL = "{}://{}:{}".format(API_HTTP_TYPE, API_DEVICE_ADDRESS, API_DEVICE_PORT)

go_channels = {
    'spm_average': {'channel': 'go_average_spm', 'last_value_sent': None},
    'downhole_gross_stroke_average': {'channel': 'go_downhole_gross_stroke', 'last_value_sent': None},  # TODO: ADD
    'downhole_net_stroke_average': {'channel': 'go_downhole_net_stroke', 'last_value_sent': None},  # TODO: ADD
    'electricity_cost_total': {'channel': 'go_electricity_cost', 'last_value_sent': None},
    'fluid_level_average': {'channel': 'go_fluid_above_pump', 'last_value_sent': None},
    'inflow_rate_average': {'channel': 'go_inflow_rate', 'last_value_sent': None},
    'kWh_used_total': {'channel': 'go_kwh', 'last_value_sent': None},
    'kWh_regen_total': {'channel': 'go_kwh_regen', 'last_value_sent': None},
    'lifting_cost_average': {'channel': 'go_lifting_cost', 'last_value_sent': None},
    'peak_pr_load': {'channel': 'go_peak_load', 'last_value_sent': None},
    'min_pr_load': {'channel': 'go_min_load', 'last_value_sent': None},
    'percent_run': {'channel': 'go_percent_run', 'last_value_sent': None},
    'polished_rod_hp_average': {'channel': 'go_polished_rod_hp', 'last_value_sent': None},
    'pump_hp_average': {'channel': 'go_pump_hp', 'last_value_sent': None},  # TODO: ADD
    'production_total': {'channel': 'go_production_calculated', 'last_value_sent': None},
    'pump_intake_pressure_average': {'channel': 'go_pump_intake_pressure', 'last_value_sent': None},
    'surface_stroke_length_average': {'channel': 'go_surface_stroke_length', 'last_value_sent': None},  # TODO: ADD
    'tubing_movement_average': {'channel': "go_tubing_movement", 'last_value_sent': None},  # TODO: ADD
}

tag_channels = {
    'Polished Rod HP': {'channel': 'polished_rod_hp', 'last_value_sent': None},
    'Peak Downhole Load': {'channel': 'downhole_peak_load', 'last_value_sent': None},  # TODO: ADD
    'Gross Stroke Length': {'channel': 'downhole_gross_stroke', 'last_value_sent': None},
    'Stroke Speed': {'channel': 'SPM', 'last_value_sent': None},
    'Tubing Head Pressure': {'channel': 'tubing_head_pressure', 'last_value_sent': None},
    'Minimum Polished Rod Load': {'channel': 'surface_min_load', 'last_value_sent': None},
    'Fluid Load': {'channel': 'downhole_fluid_load', 'last_value_sent': None},
    'Downhole Max. Position': {'channel': 'downhole_max_position', 'last_value_sent': None},
    'Downhole Net Stroke': {'channel': 'downhole_net_stroke', 'last_value_sent': None},
    'Pump Fill Percent': {'channel': 'fillage_percent', 'last_value_sent': None},
    'Downhole Pump HP': {'channel': 'pump_hp', 'last_value_sent': None},
    'Surface Min. Position': {'channel': 'surface_min_position', 'last_value_sent': None},  # TODO: ADD
    'Pump Intake Pressure': {'channel': 'pump_intake_pressure', 'last_value_sent': None},
    'Surface Max. Position': {'channel': 'surface_max_position', 'last_value_sent': None},  # TODO: ADD
    'Tubing Movement': {'channel': 'tubing_movement', 'last_value_sent': None},
    'Downhole Min. Position': {'channel': 'downhole_min_position', 'last_value_sent': None},
    'Peak Polished Rod Load': {'channel': 'surface_max_load', 'last_value_sent': None},
    'Minimum Downhole Load': {'channel': 'downhole_min_load', 'last_value_sent': None},  # TODO: ADD
    'Surface Stroke Length': {'channel': 'surface_stroke_length', 'last_value_sent': None},
    'Downhole Adjusted Gross Stroke': {'channel': 'downhole_adjusted_gross_stroke', 'last_value_sent': None},
    'Fluid Level': {'channel': 'fluid_above_pump', 'last_value_sent': None},
    'Stroke Production': {'channel': 'stroke_production', 'last_value_sent': None}
}

dt_channels = {  # Current Daily Totals
    'Electricity Cost': {'channel': 'dt_electricity_cost', 'last_value_sent': None, 'change_threshold': 1.5, 'last_send_ts': 0},
    'Inflow Rate': {'channel': 'dt_inflow_rate', 'last_value_sent': None, 'change_threshold': 1.5, 'last_send_ts': 0},
    'Energy Regen': {'channel': 'dt_kWh_regen', 'last_value_sent': None, 'change_threshold': 1.5, 'last_send_ts': 0},
    'Min Load': {'channel': 'dt_min_load', 'last_value_sent': None, 'change_threshold': 1.5, 'last_send_ts': 0},
    'Polished Rod HP': {'channel': 'dt_polished_rod_hp', 'last_value_sent': None, 'change_threshold': 1.5, 'last_send_ts': 0},
    'Calculated Production': {'channel': 'dt_calculated_production', 'last_value_sent': None, 'change_threshold': 1.5, 'last_send_ts': 0},
    'Projected Production': {'channel': 'dt_projected_production', 'last_value_sent': None, 'change_threshold': 1.5, 'last_send_ts': 0},
    'Pump HP': {'channel': 'dt_pump_hp', 'last_value_sent': None, 'change_threshold': 1.5, 'last_send_ts': 0},
    'Pump Intake Presure': {'channel': 'dt_pump_intake_pressure', 'last_value_sent': None, 'change_threshold': 1.5, 'last_send_ts': 0},
    'Surface Stroke Length': {'channel': 'dt_surface_stroke_length', 'last_value_sent': None, 'change_threshold': 1.5, 'last_send_ts': 0},
    'Tubing Movement': {'channel': 'dt_tubing_movement', 'last_value_sent': None, 'change_threshold': 1.5, 'last_send_ts': 0},
    'Downhole Net Stroke': {'channel': 'dt_downhole_net_stroke', 'last_value_sent': None, 'change_threshold': 1.5, 'last_send_ts': 0},
    'Average SPM': {'channel': 'dt_average_spm', 'last_value_sent': None, 'change_threshold': 1.5, 'last_send_ts': 0},
    'Peak Load': {'channel': 'dt_peak_load', 'last_value_sent': None, 'change_threshold': 1.5, 'last_send_ts': 0},
    'kWh': {'channel': 'dt_kWh', 'last_value_sent': None, 'change_threshold': 1.5, 'last_send_ts': 0},
    'Percent Run': {'channel': 'dt_percent_run', 'last_value_sent': None, 'change_threshold': 1.5, 'last_send_ts': 0},
    'Fluid Level': {'channel': 'dt_fluid_level', 'last_value_sent': None, 'change_threshold': 1.5, 'last_send_ts': 0},
    'Lifting Cost': {'channel': 'dt_lifting_cost', 'last_value_sent': None, 'change_threshold': 1.5, 'last_send_ts': 0},
    'Full Card Production': {'channel': 'dt_full_card_production', 'last_value_sent': None, 'change_threshold': 1.5, 'last_send_ts': 0},
}


class start(threading.Thread, deviceBase):

    def __init__(self, name=None, number=None, mac=None, Q=None, mcu=None, companyId=None, offset=None, mqtt=None, Nodes=None):
        threading.Thread.__init__(self)
        deviceBase.__init__(self, name=name, number=number, mac=mac, Q=Q, mcu=mcu, companyId=companyId, offset=offset, mqtt=mqtt, Nodes=Nodes)

        self.daemon = True
        self.forceSend = True
        self.version = "3"
        self.device_address = "http://192.168.1.30"
        # self.device_address = "http://localhost"
        self.cardLoopTimer = 600
        self.finished = threading.Event()
        threading.Thread.start(self)
        self.status_changed = False

        self.last_card_send_time = 0
        self.al_status_last = False
        self.dl_status_last = False

        # load stored Run Status ID's
        try:
            with open('runstatusIds.p', 'rb') as handle:
                self.runstatusIds = pickle.load(handle)

            print "found pickled Run Status ID dictionary: {0}".format(self.runstatusIds)
        except:
            print "couldn't load Run Status ID's from pickle"
            self.runstatusIds = []

        # load stored event ID's
        try:
            with open('eventIds.p', 'rb') as handle:
                self.eventIds = pickle.load(handle)

            print "found pickled eventID dictionary: {0}".format(self.eventIds)
        except:
            print "couldn't load event ID's from pickle"
            self.eventIds = []

        # load stored Well Test ID's
        try:
            with open('welltestIDs.p', 'rb') as handle:
                self.welltestIDs = pickle.load(handle)

            print "found pickled welltestIDs dictionary: {0}".format(self.welltestIDs)
        except:
            print "couldn't load well test ID's from pickle"
            self.welltestIDs = []

        # load stored Gauge Off ID's
        try:
            with open('gaugeoffIds.p', 'rb') as handle:
                self.gaugeoffIds = pickle.load(handle)

            print "found pickled gaugeoffIds dictionary: {0}".format(self.gaugeoffIds)
        except:
            print "couldn't load gauge off ID's from pickle"
            self.gaugeoffIds = []

        # load stored Fluid Shot ID's
        try:
            with open('fluidshotIds.p', 'rb') as handle:
                self.fluidshotIds = pickle.load(handle)

            print "found pickled fluidshotIDs dictionary: {0}".format(self.fluidshotIds)
        except:
            print "couldn't load fluid shot ID's from pickle"
            self.fluidshotIds = []

        # load stored note ID's
        try:
            with open('noteIDs.p', 'rb') as handle:
                self.noteIDs = pickle.load(handle)

            print "found pickled noteID dictionary: {0}".format(self.noteIDs)
        except:
            print "couldn't load note ID's from pickle"
            self.noteIDs = []

        # load stored last_card_id
        try:
            with open('last_card_id.p', 'rb') as handle:
                self.last_card_id = pickle.load(handle)

            print "found pickled last_card_id: {0}".format(self.last_card_id)
        except:
            print "couldn't load last_card_id from pickle"
            self.last_card_id = 0

    # this is a required function for all drivers; its goal is to upload some piece of data
    # about your device so it can be seen on the web
    def register(self):
        channels["status"]["last_value"] = ""

    def run(self):
        self.last_status = ""
        runLoopStatus = "Startup"
        while True:
            try:
                if self.forceSend:
                    print("!!!!!!!!!!!!!!! FORCE SEND !!!!!!!!!!!!!!!")

                runLoopStatus = "checkStatus"
                chk_status = self.checkStatus(self.last_status)
                if chk_status:
                    self.last_status = chk_status
                    self.status_changed = True

                runLoopStatus = "checkEvents"
                self.checkEvents()

                runLoopStatus = "checkNotes"
                self.checkNotes()

                runLoopStatus = "checkWellTests"
                self.checkWellTests()

                runLoopStatus = "checkFluidShots"
                self.checkFluidShots()

                runLoopStatus = "checkDailyTotals"
                self.checkDailyTotals()

                runLoopStatus = "checkGaugeOffData"
                self.checkGaugeOffData()

                runLoopStatus = "checkStoredValues"
                self.checkStoredValues()

                # runLoopStatus = "getDataLoggerStatus()"
                # self.getDataLoggerStatus()

                if self.status_changed:
                    runLoopStatus = "getLatestXCards"
                    self.forceSend = True
                    self.checkLatestCard(numCards=5)
                else:
                    runLoopStatus = "checkLatestCard"
                    self.checkLatestCard()

                # if self.forceSend or (checkBackupSkipped > checkBackupEvery):
                #     runLoopStatus = "checkBackup"
                #     self.checkBackup()
                #     checkBackupSkipped = 0
                # checkBackupSkipped = checkBackupSkipped + 1

                runLoopStatus = "Complete"
                time.sleep(10)
                self.forceSend = False
            except Exception, e:
                sleep_timer = 20
                print "Error during {0} of run loop: {1}\nWill try again in {2} seconds...".format(runLoopStatus, e, sleep_timer)
                traceback.print_exc()
                time.sleep(sleep_timer)

    def checkStatus(self, last_status):
        global API_BASE_URL
        try:
            url = API_BASE_URL + '/api/run_status_log?q={"order_by":[{"field":"created_on","direction":"desc"}]}'
            api_req = requests.get(url, verify=False)
            if api_req.status_code == 200:
                req_data = json.loads(api_req.text)
                req_data['objects'].reverse()
                for i in range(0, len(req_data['objects'])):
                    if int(req_data['objects'][i]['_id']) not in self.runstatusIds:
                        new_status = req_data['objects'][i]["run_status"]
                        timestamp = calendar.timegm(datetime.strptime(req_data['objects'][i]['created_on'], '%Y-%m-%dT%H:%M:%S.%f').timetuple())
                        self.sendtodb('status', new_status, timestamp)
                        self.runstatusIds.append(int(req_data['objects'][i]['_id']))
                        if len(self.runstatusIds) > 20:
                            del self.runstatusIds[0]
                        with open('runstatusIds.p', 'wb') as handle:
                            pickle.dump(self.runstatusIds, handle)

                if req_data['objects'][-1:][0]['run_status'] != last_status:
                    print "Status has changed from {0} to {1} @ {2}".format(last_status, req_data['objects'][-1:][0]['run_status'], req_data['objects'][-1:][0]['created_on'])
                    return req_data['objects'][-1:][0]['run_status']
        except Exception as e:
            print "Error during checkStatus..."
            print("++++++== TRACEBACK ==++++++")
            traceback.print_exc()
            return False

    def checkEvents(self):
        global API_BASE_URL
        try:
            url = API_BASE_URL + '/api/events?q={"order_by":[{"field":"created_on","direction":"desc"}]}'
            api_req = requests.get(url, verify=False)
            if api_req.status_code == 200:
                req_data = json.loads(api_req.text)
                req_data['objects'].reverse()
                for i in range(0, len(req_data['objects'])):
                    if int(req_data['objects'][i]['_id']) not in self.eventIds:
                        timestamp = calendar.timegm(datetime.strptime(req_data['objects'][i]['created_on'], '%Y-%m-%dT%H:%M:%S.%f').timetuple())
                        self.sendtodbJSON('events', json.dumps(req_data['objects'][i]), timestamp)
                        self.eventIds.append(int(req_data['objects'][i]['_id']))
                        if len(self.eventIds) > 20:
                            del self.eventIds[0]
                        with open('eventIds.p', 'wb') as handle:
                            pickle.dump(self.eventIds, handle)
            return True
        except Exception as e:
            print "Error during checkEvents..."
            print("++++++== TRACEBACK ==++++++")
            traceback.print_exc()
            return False

    def checkNotes(self):
        global API_BASE_URL
        try:
            url = API_BASE_URL + '/api/notes?q={"order_by":[{"field":"created_on","direction":"desc"}]}'
            api_req = requests.get(url, verify=False)
            if api_req.status_code == 200:
                req_data = json.loads(api_req.text)
                req_data['objects'].reverse()
                for i in range(0, len(req_data['objects'])):
                    if int(req_data['objects'][i]['_id']) not in self.noteIDs:
                        timestamp = calendar.timegm(datetime.strptime(req_data['objects'][i]['created_on'], '%Y-%m-%dT%H:%M:%S').timetuple())
                        self.sendtodbJSON('notes', json.dumps(req_data['objects'][i]), timestamp)
                        self.noteIDs.append(int(req_data['objects'][i]['_id']))
                        if len(self.noteIDs) > 20:
                            del self.noteIDs[0]
                        with open('noteIDs.p', 'wb') as handle:
                            pickle.dump(self.noteIDs, handle)
            return True
        except Exception as e:
            print "Error during checkNotes..."
            print("++++++== TRACEBACK ==++++++")
            traceback.print_exc()
            return False

    def checkFluidShots(self):
        global API_BASE_URL
        try:
            url = API_BASE_URL + '/api/fluid_shots?q={"order_by":[{"field":"created_on","direction":"desc"}]}'
            api_req = requests.get(url, verify=False)
            if api_req.status_code == 200:
                req_data = json.loads(api_req.text)
                req_data['objects'].reverse()
                for i in range(0, len(req_data['objects'])):
                    if int(req_data['objects'][i]['_id']) not in self.fluidshotIds:
                        timestamp = calendar.timegm(datetime.strptime(req_data['objects'][i]['created_on'], '%Y-%m-%dT%H:%M:%S').timetuple())
                        self.sendtodbJSON('fluidshots', json.dumps(req_data['objects'][i]), timestamp)
                        self.fluidshotIds.append(int(req_data['objects'][i]['_id']))
                        if len(self.fluidshotIds) > 20:
                            del self.fluidshotIds[0]
                        with open('fluidshotIds.p', 'wb') as handle:
                            pickle.dump(self.fluidshotIds, handle)
            return True
        except Exception as e:
            print "Error during checkFluidShots..."
            print("++++++== TRACEBACK ==++++++")
            traceback.print_exc()
            return False

    def checkWellTests(self):
        global API_BASE_URL
        try:
            url = API_BASE_URL + '/api/well_test?q={"order_by":[{"field":"created_on","direction":"desc"}]}'
            api_req = requests.get(url, verify=False)
            if api_req.status_code == 200:
                req_data = json.loads(api_req.text)
                req_data['objects'].reverse()
                for i in range(0, len(req_data['objects'])):
                    if int(req_data['objects'][i]['_id']) not in self.welltestIDs:
                        timestamp = calendar.timegm(datetime.strptime(req_data['objects'][i]['created_on'], '%Y-%m-%dT%H:%M:%S').timetuple())
                        self.sendtodbJSON('welltests', json.dumps(req_data['objects'][i]), timestamp)
                        self.welltestIDs.append(int(req_data['objects'][i]['_id']))
                        if len(self.welltestIDs) > 20:
                            del self.welltestIDs[0]
                        with open('welltestIDs.p', 'wb') as handle:
                            pickle.dump(self.welltestIDs, handle)
            return True
        except Exception as e:
            print "Error during checkWellTests..."
            print("++++++== TRACEBACK ==++++++")
            traceback.print_exc()
            return False

    def checkDailyTotals(self):
        global API_BASE_URL, dt_channels
        try:
            api_req = requests.get("{}/api/today_totals".format(API_BASE_URL), verify=False)
            if api_req.status_code == 200:
                req_data = json.loads(api_req.text)
                for i in range(0, len(req_data)):
                    if req_data[i]['name'] in dt_channels:
                        channel = dt_channels[req_data[i]['name']]
                        # Send on the first reading, when the value moves more than
                        # the configured threshold, or at least once an hour.
                        if (channel['last_value_sent'] is None
                                or abs(channel['last_value_sent'] - req_data[i]['value']) > channel['change_threshold']
                                or time.time() - channel['last_send_ts'] > 3600):
                            self.sendtodb(channel['channel'], req_data[i]['value'], 0)
                            channel['last_value_sent'] = req_data[i]['value']
                            channel['last_send_ts'] = time.time()
        except Exception as e:
            print("Error during checkDailyTotals...")
            print("++++++== TRACEBACK ==++++++")
            traceback.print_exc()

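    # Gauge-off records fan out: each column of a new record that has an entry
    # in go_channels is sent to its mapped channel with the record's timestamp.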
    def checkGaugeOffData(self):
        global API_BASE_URL, go_channels
        try:
            url = API_BASE_URL + '/api/gauge_off?q={"order_by":[{"field":"created_on","direction":"desc"}]}'
            api_req = requests.get(url, verify=False)
            if api_req.status_code == 200:
                req_data = json.loads(api_req.text)
                req_data['objects'].reverse()
                for i in range(0, len(req_data['objects'])):
                    if int(req_data['objects'][i]['_id']) not in self.gaugeoffIds:
                        timestamp = calendar.timegm(datetime.strptime(req_data['objects'][i]['created_on'], '%Y-%m-%dT%H:%M:%S.%f').timetuple())
                        for col_name in req_data['objects'][i]:
                            if col_name in go_channels:
                                self.sendtodb(go_channels[col_name]['channel'], req_data['objects'][i][col_name], timestamp)

                        self.gaugeoffIds.append(int(req_data['objects'][i]['_id']))
                        if len(self.gaugeoffIds) > 20:
                            del self.gaugeoffIds[0]
                        with open('gaugeoffIds.p', 'wb') as handle:
                            pickle.dump(self.gaugeoffIds, handle)
            return True
        except Exception as e:
            print("Error during checkGaugeOffData...")
            print("++++++== TRACEBACK ==++++++")
            traceback.print_exc()
            return False

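    # Latest stored tag values: sent on change only, stamped with the device's
    # own reading time rather than the upload time.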
    def checkStoredValues(self):
        global API_BASE_URL, tag_channels
        try:
            api_req = requests.get('{}/api/latest'.format(API_BASE_URL), verify=False)
            if api_req.status_code == 200:
                req_data = json.loads(api_req.text)
                for i in range(0, len(req_data)):
                    if req_data[i]['tag_name'] in tag_channels:
                        channel = tag_channels[req_data[i]['tag_name']]
                        # Send on the first reading or whenever the stored value changes.
                        if (channel['last_value_sent'] is None
                                or req_data[i]['value'] != channel['last_value_sent']):
                            timestamp = calendar.timegm(datetime.strptime(req_data[i]['datetime'], '%Y-%m-%d %H:%M:%S.%f').timetuple())
                            self.sendtodb(channel['channel'], req_data[i]['value'], timestamp)
                            channel['last_value_sent'] = req_data[i]['value']
        except Exception as e:
            print("Error during checkStoredValues...")
            print("++++++== TRACEBACK ==++++++")
            traceback.print_exc()

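    # Card handling is two-tier: every new card id goes to card_history, but the
    # full card payload (surface/downhole position and load curves) is only
    # uploaded when forced, on a status change, or after cardLoopTimer elapses.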
    def checkLatestCard(self, numCards=1):
        global API_BASE_URL
        try:
            url = API_BASE_URL + '/api/cards?q={"order_by":[{"field":"created_on","direction":"desc"}], "limit":' + str(numCards) + "}"
            api_req = requests.get(url, verify=False)
            req_data = json.loads(api_req.text)['objects']

            utc_tz = tz.tzutc()
            local_tz = tz.tzlocal()

            # Check each returned card to see if it is new:
            # 1. If it is new, send the card id to the card_history channel.
            # 2. If it is new and a full upload is due, send all of the card data.
            for i in range(0, len(req_data)):
                current_card = req_data[i]
                if current_card['_id'] > self.last_card_id:
                    # created_on looks like 2016-11-23T00:37:02.806026 (UTC)
                    dt = datetime.strptime(current_card['created_on'], '%Y-%m-%dT%H:%M:%S.%f')
                    dt_utc = dt.replace(tzinfo=utc_tz)
                    dt_local = dt_utc.astimezone(local_tz)

                    timestamp_utc = calendar.timegm(dt_utc.timetuple())
                    timestamp_local = calendar.timegm(dt_local.timetuple())

                    print("New card detected @ {0}".format(dt_local.strftime("%Y-%m-%dT%H:%M:%S.%f")))
                    # Record the new card id and persist it so a restart resumes here.
                    self.sendtodb("card_history", current_card['_id'], timestamp_utc)
                    self.last_card_id = current_card['_id']
                    with open('last_card_id.p', 'wb') as handle:
                        pickle.dump(self.last_card_id, handle)

                    # Check whether a full upload is due.
                    if (time.time() - self.last_card_send_time) > self.cardLoopTimer or self.status_changed or self.forceSend:
                        print("Either status has changed or last stored card is too old.")
                        self.sendtodb("cardtype", current_card['stroke_type'], int(timestamp_utc))

                        # Parse the "[x, y, ...]" strings into position/load pairs,
                        # dropping points where either coordinate is zero.
                        s_p = current_card["surf_pos"].replace("[", "").replace("]", "").split(", ")
                        s_l = current_card["surf_lod"].replace("[", "").replace("]", "").split(", ")
                        d_p = current_card["down_pos"].replace("[", "").replace("]", "").split(", ")
                        d_l = current_card["down_lod"].replace("[", "").replace("]", "").split(", ")
                        newSc = "["
                        newDc = "["

                        for j in range(len(s_p)):
                            try:
                                pos = float(s_p[j])
                                lod = float(s_l[j])
                                if pos != 0.0 and lod != 0.0:
                                    newSc += "[" + str(round(pos, 3)) + "," + str(round(lod, 3)) + "],"
                            except (ValueError, IndexError):
                                pass
                        newSc = newSc[:-1] + "]" if len(newSc) > 1 else "[]"

                        for j in range(len(d_p)):
                            try:
                                pos = float(d_p[j])
                                lod = float(d_l[j])
                                if pos != 0.0 and lod != 0.0:
                                    newDc += "[" + str(round(pos, 3)) + "," + str(round(lod, 3)) + "],"
                            except (ValueError, IndexError):
                                pass
                        newDc = newDc[:-1] + "]" if len(newDc) > 1 else "[]"

                        self.sendtodb("surf_pos", current_card["surf_pos"], timestamp_utc)
                        self.sendtodb("surf_lod", current_card["surf_lod"], timestamp_utc)
                        self.sendtodb("down_pos", current_card["down_pos"], timestamp_utc)
                        self.sendtodb("down_lod", current_card["down_lod"], timestamp_utc)
                        self.sendtodb("sc", newSc, timestamp_utc)
                        self.sendtodb("dc", newDc, timestamp_utc)

        except Exception as e:
            print("Error during checkLatestCard...")
            print("++++++== TRACEBACK ==++++++")
            traceback.print_exc()

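    # getDataLoggerStatus is kept below for reference but is currently disabled;
    # it polled <device_address>/json/pythonstatus/ and reported alarm/data
    # logger health on change.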
    # def getDataLoggerStatus(self):
    #     try:
    #         data = json.loads(requests.get(self.device_address + "/json/pythonstatus/", verify=False).text)
    #         al_status = "Not OK"
    #         if data['status']['alarmLogger']:
    #             al_status = "OK"
    #
    #         if al_status != self.al_status_last:
    #             self.sendtodb("alarmlogger_status", al_status, 0)
    #             self.al_status_last = al_status
    #
    #         dl_status = "Not OK"
    #         if data['status']['dataLogger']:
    #             dl_status = "OK"
    #         if dl_status != self.dl_status_last:
    #             self.sendtodb("datalogger_status", dl_status, 0)
    #             self.dl_status_last = dl_status
    #     except Exception as e:
    #         print("getDataLoggerStatus Error: {}".format(e))

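    # Handlers for incoming name/value commands; how they are registered is
    # outside this excerpt.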
    def poc_get_card(self, name, value):
        self.getcard(value)

    def poc_sync(self, name, value):
        self.sendtodb("connected", "true", 0)
        return True

    def poc_set_address(self, name, value):
        self.device_address = value
        return True

    def poc_refresh_data(self, name, value):
        self.forceSend = True
        return True