Compare commits
45 Commits: version2.0 ... master

| Author | SHA1 | Date |
|---|---|---|
|  | 3136177cdb |  |
|  | dd51b3f98d |  |
|  | cb03d361e3 |  |
|  | ea93b8ac12 |  |
|  | 5cee82a34f |  |
|  | 4d356a7fa1 |  |
|  | 789cc193f3 |  |
|  | 9b31ec4e54 |  |
|  | 2421ca67f3 |  |
|  | f92788d250 |  |
|  | 27253b6f66 |  |
|  | 9f984447da |  |
|  | bb449c709c |  |
|  | c690d9da69 |  |
|  | cf2cecc495 |  |
|  | 2ac07f8a75 |  |
|  | cbf89704d9 |  |
|  | f9a7bed354 |  |
|  | b1c39e3ee6 |  |
|  | e5fac283a5 |  |
|  | fe4f07846c |  |
|  | 4caaf23c73 |  |
|  | a80c387dd2 |  |
|  | a882061d05 |  |
|  | 81bb6e5471 |  |
|  | 80a86f76f1 |  |
|  | f521804072 |  |
|  | 4cd4ff9702 |  |
|  | 5fc1a9435d |  |
|  | 7436996f7d |  |
|  | ab125a721e |  |
|  | 8992ab0c4f |  |
|  | 660a28b608 |  |
|  | 084c2d8cdf |  |
|  | 509fac659f |  |
|  | d43cb76fe2 |  |
|  | deec5db66d |  |
|  | c3693d8e4b |  |
|  | 147a31f2d1 |  |
|  | 28ee08412f |  |
|  | 7bcf6a3ecb |  |
|  | 2be122cf0d |  |
|  | d8e3382a8d |  |
|  | 348d6c3d53 |  |
|  | ea692ba469 |  |
.gitignore (vendored, 1 change)
@@ -1,3 +1,4 @@
```diff
 *.Sem
 *.Wrk
 *.BAK*
+*.ACD_1769-L24ER-QB1B
```
BIN Documentation/20150619_HenryPumpVFD_UsersManual.docx (new file)
BIN Documentation/20150619_HenryPumpVFD_UsersManual.pdf (new file)
BIN Documentation/Drive parameters.xlsx (new executable file)
Documentation/Horn Signals.md (new file)
@@ -0,0 +1,11 @@
```markdown
Horn Signals - 1/12/2015 - Midland, TX
======================================

**30 seconds before unit restarts**

- Horn sounds once (for 2 sec)

**On startup (any startup)**

- Horn sounds for 5 sec
```
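For reference, a minimal Python sketch of the warning sequence the document describes. The `sound_horn` and `start_unit` helpers are hypothetical stand-ins for the real PLC outputs, and the exact overlap between the 5-second blast and the motor start is an assumption:

```python
import time

def sound_horn(seconds):
    # Hypothetical stand-in for energizing the horn output.
    print("HORN ON ({} sec)".format(seconds))
    time.sleep(seconds)
    print("HORN OFF")

def restart_sequence(start_unit):
    # 30 seconds before the unit restarts: one 2-second blast.
    sound_horn(2)
    time.sleep(30 - 2)   # remainder of the 30-second warning window
    # On startup (any startup): a 5-second blast as the unit starts.
    start_unit()
    sound_horn(5)
```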
BIN Documentation/Images/2015-06-19 10_36_54-1-MAIN.png (new file, 30 KiB)
BIN Documentation/Images/2015-06-19 10_37_43-1-MAIN.png (new file, 24 KiB)
BIN Documentation/Images/2015-06-19 10_39_50-4-Unit_Setup.png (new file, 28 KiB)
BIN Documentation/Images/2015-06-19 10_40_20-Images.png (new file, 28 KiB)
BIN Documentation/Images/2015-06-19 10_42_29-4-Unit_Setup.png (new file, 36 KiB)
BIN Documentation/Images/2015-06-19 10_44_04-4-Unit_Setup.png (new file, 31 KiB)
BIN Documentation/Images/2015-06-19 10_44_39-4-Unit_Setup.png (new file, 27 KiB)
BIN Documentation/Images/2015-06-19 10_45_24-4-Unit_Setup.png (new file, 20 KiB)
BIN Documentation/Images/2015-06-19 10_45_38-4-Unit_Setup.png (new file, 32 KiB)
BIN Documentation/Images/2015-06-19 10_46_03-4-Unit_Setup.png (new file, 25 KiB)
BIN Documentation/Images/2015-06-19 10_46_19-4-Unit_Setup.png (new file, 20 KiB)
BIN Documentation/Images/2015-06-19 10_46_57-4-Unit_Setup.png (new file, 21 KiB)
BIN Documentation/Images/2015-06-19 10_49_00-99-Graph_XY_Current.png (new file, 34 KiB)
BIN Documentation/Images/2015-06-19 10_54_59-99-Graph_XY_Current.png (new file, 31 KiB)
BIN Documentation/Images/2015-06-19 10_56_04-97-Pump.png (new file, 36 KiB)
BIN Documentation/Images/2015-06-19 10_56_43-3-Unit_Control.png (new file, 28 KiB)
BIN Documentation/Images/2015-06-19 10_57_00-3-Unit_Control.png (new file, 26 KiB)
BIN Documentation/Images/2015-06-19 10_57_21-3-Unit_Control.png (new file, 26 KiB)
BIN Documentation/Images/2015-06-19 10_57_42-3-Unit_Control.png (new file, 24 KiB)
BIN Documentation/Images/2015-06-19 10_58_05-1-MAIN.png (new file, 37 KiB)
BIN Documentation/Images/2015-06-19 10_58_25-5-Analog_Status.png (new file, 37 KiB)
BIN Documentation/Images/2015-06-19 10_58_46-5-Analog_Status.png (new file, 22 KiB)
BIN Documentation/Images/2015-06-19 10_59_05-5-Analog_Status.png (new file, 24 KiB)
BIN Documentation/Images/2015-06-19 11_07_37-75-Event_List.png (new file, 18 KiB)
BIN Documentation/Images/2015-06-19 11_08_05-7-Trends.png (new file, 25 KiB)
BIN Documentation/Images/2015-06-19 11_08_25-11-WellTest.png (new file, 23 KiB)
BIN Documentation/Images/2015-06-19 11_09_12-9-SpeedProfile.png (new file, 20 KiB)
PLC/HP_POC_VFD_L19.ACD (new file, +12165)
PLC/Henry_Pump_VFD_L24.ACD_V21 (new file, +9928)
PLC/Henry_Pump_VFD_L24_V30.ACD (new file, +11961)
POCloud/.vscode/.ropeproject/config.py (vendored, new file)
@@ -0,0 +1,100 @@
```python
# The default ``config.py``
# flake8: noqa


def set_prefs(prefs):
    """This function is called before opening the project"""

    # Specify which files and folders to ignore in the project.
    # Changes to ignored resources are not added to the history and
    # VCSs. Also they are not returned in `Project.get_files()`.
    # Note that ``?`` and ``*`` match all characters but slashes.
    # '*.pyc': matches 'test.pyc' and 'pkg/test.pyc'
    # 'mod*.pyc': matches 'test/mod1.pyc' but not 'mod/1.pyc'
    # '.svn': matches 'pkg/.svn' and all of its children
    # 'build/*.o': matches 'build/lib.o' but not 'build/sub/lib.o'
    # 'build//*.o': matches 'build/lib.o' and 'build/sub/lib.o'
    prefs['ignored_resources'] = ['*.pyc', '*~', '.ropeproject',
                                  '.hg', '.svn', '_svn', '.git', '.tox']

    # Specifies which files should be considered python files. It is
    # useful when you have scripts inside your project. Only files
    # ending with ``.py`` are considered to be python files by
    # default.
    #prefs['python_files'] = ['*.py']

    # Custom source folders: By default rope searches the project
    # for finding source folders (folders that should be searched
    # for finding modules). You can add paths to that list. Note
    # that rope guesses project source folders correctly most of the
    # time; use this if you have any problems.
    # The folders should be relative to project root and use '/' for
    # separating folders regardless of the platform rope is running on.
    # 'src/my_source_folder' for instance.
    #prefs.add('source_folders', 'src')

    # You can extend python path for looking up modules
    #prefs.add('python_path', '~/python/')

    # Should rope save object information or not.
    prefs['save_objectdb'] = True
    prefs['compress_objectdb'] = False

    # If `True`, rope analyzes each module when it is being saved.
    prefs['automatic_soa'] = True
    # The depth of calls to follow in static object analysis
    prefs['soa_followed_calls'] = 0

    # If `False` when running modules or unit tests "dynamic object
    # analysis" is turned off. This makes them much faster.
    prefs['perform_doa'] = True

    # Rope can check the validity of its object DB when running.
    prefs['validate_objectdb'] = True

    # How many undos to hold?
    prefs['max_history_items'] = 32

    # Shows whether to save history across sessions.
    prefs['save_history'] = True
    prefs['compress_history'] = False

    # Set the number spaces used for indenting. According to
    # :PEP:`8`, it is best to use 4 spaces. Since most of rope's
    # unit-tests use 4 spaces it is more reliable, too.
    prefs['indent_size'] = 4

    # Builtin and c-extension modules that are allowed to be imported
    # and inspected by rope.
    prefs['extension_modules'] = []

    # Add all standard c-extensions to extension_modules list.
    prefs['import_dynload_stdmods'] = True

    # If `True` modules with syntax errors are considered to be empty.
    # The default value is `False`; When `False` syntax errors raise
    # `rope.base.exceptions.ModuleSyntaxError` exception.
    prefs['ignore_syntax_errors'] = False

    # If `True`, rope ignores unresolvable imports. Otherwise, they
    # appear in the importing namespace.
    prefs['ignore_bad_imports'] = False

    # If `True`, rope will insert new module imports as
    # `from <package> import <module>` by default.
    prefs['prefer_module_from_imports'] = False

    # If `True`, rope will transform a comma list of imports into
    # multiple separate import statements when organizing
    # imports.
    prefs['split_imports'] = False

    # If `True`, rope will sort imports alphabetically by module name
    # instead of alphabetically by import statement, with from imports
    # after normal imports.
    prefs['sort_imports_alphabetically'] = False


def project_opened(project):
    """This function is called after opening the project"""
    # Do whatever you like here!
```
BIN POCloud/.vscode/.ropeproject/objectdb (vendored, new file)

POCloud/.vscode/settings.json (vendored, new file)
@@ -0,0 +1,3 @@
```json
{
    "python.pythonPath": "/usr/local/bin/python"
}
```
POCloud/HTML Templates/Fluid_Shots.html (new file)
@@ -0,0 +1,93 @@
```html
<div style='margin-top: 1em;' class='col-xs-12'>
  <div class="input-daterange input-group" id="datePicker">
    <input id="fromDate" data-daysofhistory="30" type="text" class="form-control" name="start">
    <span class="input-group-addon">to</span>
    <input class="form-control" id="toDate" type="text" name="end">
    <span class='input-group-btn'>
      <a href="" id="runPickerBtn" class="btn btn-theme">Run</a>
    </span>
  </div>
</div>
<div class='col-xs-12' style='margin-top: 2em;'>
  <table class="table">
    <thead>
      <th>Shot ID</th>
      <th>Date & Time</th>
      <th>Intake Pressure</th>
      <th>Fluid Gradient</th>
      <th>Friction</th>
      <th>Taken By</th>
    </thead>

    <tbody id="output">
    </tbody>
  </table>
  <!--<%= JSON.stringify(channels['poc.events'].value) %>-->
</div>

<script>
    var nodeID = <%= node.nodeId %>;
    var nodeType = '<%= node.nodetypeName %>';
    var channelID = <%= channels['poc.fluidshots'].channelId %>;
    console.log({nodeID:nodeID, nodeType:nodeType, channelID: channelID})

    var formatDate = function(str){
        var c1, c2;
        c1 = new Date(str);
        c2 = (c1.getTime() / 1000);
        c2 = Math.floor(c2);
        return c2.toString();
    };

    var updateTable = function(){
        var apiData, start, end;
        var $output = $('#output');
        start = $('#datePicker').find('#fromDate');
        dateString = start.val().replace(/-/g, "/");
        start = formatDate(dateString);
        end = $('#datePicker').find('#toDate');
        dateString = end.val().replace(/-/g, "/");
        end = formatDate(dateString);
        apiData = "&nodelist[0][nodeId]=" + nodeID.toString() + "&nodelist[0][channelId]=" + channelID.toString();
        apiData += "&start=" + start + "&end=" + end;
        $.ajax({
            url: "http://www.pocloud.io/api2/Nodechannels",
            data: apiData,
            dataType: "json",
            type: "GET",
            success: function(data) {
                console.log(data);
                var notes = data.listofstreams[0];
                for(var i = notes.stream.length-1; i >= 0; i--) {
                    var note = notes.stream[i];
                    var timestamp = note.x;
                    var jsonBlock = note.y;
                    var n = JSON.parse(jsonBlock);
                    var row = "<tr>";
                    row += "<td>" + n.id + "</td>";
                    row += "<td>" + n.shot_datetime + "</td>";
                    row += "<td>" + n.pump_intake_pressure + "</td>";
                    row += "<td>" + n.fluid_gradient + "</td>";
                    row += "<td>" + n.friction + "</td>";
                    row += "<td>" + n.taken_by + "</td>";
                    row += "</tr>"
                    $output.append(row)
                    //$output.append(JSON.stringify(jsonBlock));
                }
            }
        });
    }

    $(document).on('click', '#runPickerBtn', function(e){
        e.preventDefault();
        updateTable();
    });
    $(document).ready(function(){
        updateTable();
    })
</script>
```
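This template (and the near-identical Notes, Well_Tests, and events templates below) converts the picker dates to epoch seconds, issues a GET to the Nodechannels endpoint, and then parses each stream point's `y` field as a JSON blob. A minimal Python 2 sketch of the same round trip; the helper name `fetch_fluid_shots` is hypothetical and stands in for the template's `updateTable`:

```python
import json
import time
import urllib
import urllib2

def to_epoch(date_str):
    # Mirrors the template's formatDate: "MM/DD/YYYY" -> seconds since epoch
    # (local time, as new Date(str).getTime()/1000 is in the browser code).
    return str(int(time.mktime(time.strptime(date_str, "%m/%d/%Y"))))

def fetch_fluid_shots(node_id, channel_id, start, end):
    params = urllib.urlencode({
        "nodelist[0][nodeId]": node_id,
        "nodelist[0][channelId]": channel_id,
        "start": to_epoch(start),
        "end": to_epoch(end),
    })
    resp = urllib2.urlopen("http://www.pocloud.io/api2/Nodechannels?" + params)
    data = json.load(resp)
    # Each stream point's y field is itself a JSON document (one fluid shot).
    return [json.loads(pt["y"]) for pt in data["listofstreams"][0]["stream"]]
```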
POCloud/HTML Templates/Gauge Off.html (new file)
@@ -0,0 +1,154 @@
```html
<div class='col-xs-12' style="padding-top: 1em; margin-bottom: 1em;">
  <div class="input-daterange input-group" id="datepicker">
    <input data-chartid="dynamicChart" id="fromDate" data-daysofhistory="30" type="text" class="form-control" name="start">
    <span class="input-group-addon">to</span>
    <input class="form-control" data-chartid="dynamicChart" id="toDate" type="text" name="end">
    <span class='input-group-btn'>
      <a href="#!" data-chartid="dynamicChart" data-otherchartids="statusTimeline" class="btn chart-update btn-theme">Run</a>
    </span>
  </div>
</div>

<div class='clearfix col-xs-12' style='height: 300px' id="dynamicChart" data-chart="dynamicchart" data-daysofhistory="30" data-chartlabel="Data" data-ylabel="" data-xlabel="Date" data-units="" data-channelnames="poc.go_kwh,poc.go_electricity_cost,poc.go_peak_load,poc.go_min_load,poc.go_average_spm,poc.go_production_calculated,poc.go_full_card_production,poc.go_polished_rod_hp,poc.go_lifting_cost,poc.go_fluid_above_pump,poc.go_pump_intake_pressure,poc.go_kwh_regen,poc.go_inflow_rate"></div>

<div class='col-xs-12' style='margin-top: 2em;'>
  <table class="table">
    <thead>
      <th>Date</th>
      <th>% Run</th>
      <th>kWh</th>
      <th>Electricity Cost</th>
      <th>Peak Load</th>
      <th>Min. Load</th>
      <th>Average SPM</th>
      <th>Production</th>
      <th>Full Card Production</th>
      <th>Polished Rod HP</th>
      <th>Lifting Cost</th>
      <th>Fluid Level</th>
      <th>Pump Intake Pressure</th>
      <th>kWh Regen</th>
      <th>Inflow Rate</th>
    </thead>

    <tbody id="output">
    </tbody>

  </table>
</div>

<style>
  .dynamic-chart-form {
    background-color: whiteSmoke;
    padding: 1em 0.5em;
    margin-top: 1em;
  }
</style>

<script>
    var tableData = {};
    var nodeID = <%= node.nodeId %>;
    var nodeType = '<%= node.nodetypeName %>';
    var channelData = {
        go_percent_run: {name:<%= channels['poc.go_percent_run'].channelId %>, values:[]},
        go_kwh: {name:<%= channels['poc.go_kwh'].channelId %>, values:[]},
        go_electricity_cost: {name:<%= channels['poc.go_electricity_cost'].channelId %>, values:[]},
        go_peak_load: {name:<%= channels['poc.go_peak_load'].channelId %>, values:[]},
        go_min_load: {name:<%= channels['poc.go_min_load'].channelId %>, values:[]},
        go_average_spm: {name:<%= channels['poc.go_average_spm'].channelId %>, values:[]},
        go_production_calculated: {name:<%= channels['poc.go_production_calculated'].channelId %>, values:[]},
        go_polished_rod_hp: {name:<%= channels['poc.go_polished_rod_hp'].channelId %>, values:[]},
        go_lifting_cost: {name:<%= channels['poc.go_lifting_cost'].channelId %>, values:[]},
        go_fluid_above_pump: {name:<%= channels['poc.go_fluid_above_pump'].channelId %>, values:[]},
        go_pump_intake_pressure: {name:<%= channels['poc.go_pump_intake_pressure'].channelId %>, values:[]},
        go_kwh_regen: {name:<%= channels['poc.go_kwh_regen'].channelId %>, values:[]},
        go_inflow_rate: {name:<%= channels['poc.go_inflow_rate'].channelId %>, values:[]}
    };

    var formatDate = function(str){
        var c1 = new Date(str);
        var c2 = (c1.getTime() / 1000);
        c2 = Math.floor(c2);
        return c2.toString();
    };

    var updateTable = function(chID, chName){
        var start = $('#datepicker').find('#fromDate');
        //console.log({start:start.val()});
        dateString = start.val().replace(/-/g, "/");
        start = formatDate(dateString);

        var end = $('#datepicker').find('#toDate');
        //console.log({end:end.val()});
        dateString = end.val().replace(/-/g, "/");
        end = formatDate(dateString);

        var apiData = "&nodelist[0][nodeId]=" + nodeID.toString() + "&nodelist[0][channelId]=" + chID.toString();
        apiData += "&start=" + start + "&end=" + end;

        $.ajax({
            url: '<%= rootURL %>' + '/api2/Nodechannels',
            data: apiData,
            dataType: "json",
            type: "GET",
            success: function(data) {
                var dataPoints = data.listofstreams[0];
                for(var i = dataPoints.stream.length-1; i > 0; i--){
                    var dpt = dataPoints.stream[i];
                    var timestamp = dpt.x;
                    var val = dpt.y;
                    channelData[chName]['values'].push({t:timestamp, val:val});
                    if (!tableData.hasOwnProperty(timestamp)){
                        tableData[timestamp] = {};
                    }
                    tableData[timestamp][chName] = val;
                }
            }
        });
    };
    $(document).ready(function(){
        var $output = $('#output');
        for (var channel in channelData){
            if(channelData.hasOwnProperty(channel)){
                updateTable(channelData[channel]['name'], channel);
            }
        }

        var buildTable = function(){
            for (var timestp in tableData){
                if(tableData.hasOwnProperty(timestp)){
                    var date = new Date(parseInt(timestp) * 1000);
                    var dateString = (date.getMonth() +1) + "/" + date.getDate() + "/" + date.getFullYear();

                    var row = "<tr><td>" + dateString + "</td>";
                    row += "<td>" + tableData[timestp]['go_percent_run']+ "</td>";
                    row += "<td>" + tableData[timestp]['go_kwh'] + "</td>";
                    row += "<td>" + tableData[timestp]['go_electricity_cost'] + "</td>";
                    row += "<td>" + tableData[timestp]['go_peak_load'] + "</td>";
                    row += "<td>" + tableData[timestp]['go_min_load'] + "</td>";
                    row += "<td>" + tableData[timestp]['go_average_spm'] + "</td>";
                    row += "<td>" + tableData[timestp]['go_production_calculated'] + "</td>";
                    row += "<td>" + tableData[timestp]['go_polished_rod_hp'] + "</td>";
                    row += "<td>" + tableData[timestp]['go_lifting_cost'] + "</td>";
                    row += "<td>" + tableData[timestp]['go_fluid_above_pump'] + "</td>";
                    row += "<td>" + tableData[timestp]['go_pump_intake_pressure'] + "</td>";
                    row += "<td>" + tableData[timestp]['go_kwh_regen'] + "</td>";
                    row += "<td>" + tableData[timestp]['go_inflow_rate'] + "</td></tr>";
                    console.log(row);
                    $output.append(row);
                }
            }
        };

        //console.log({tableData:tableData, channelData:channelData});
        setTimeout(buildTable, 3000);
    });
</script>
```
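Unlike the single-channel templates, this page fans out one request per channel and merges the responses into tableData, a dict of row objects keyed by timestamp; the setTimeout(buildTable, 3000) then assumes every response has landed within three seconds (joining the jQuery deferreds with $.when would make that deterministic). A sketch of the merge step in Python, with illustrative sample values:

```python
# One dict per timestamp, filled in channel by channel as responses arrive.
table_data = {}

def merge_channel(name, stream):
    # stream: list of {"x": epoch_seconds, "y": value} points for one channel.
    for pt in stream:
        table_data.setdefault(pt["x"], {})[name] = pt["y"]

merge_channel("go_kwh", [{"x": 1434672000, "y": 118.4}])          # sample values
merge_channel("go_peak_load", [{"x": 1434672000, "y": 15230.0}])  # sample values
print(table_data)
# {1434672000: {'go_kwh': 118.4, 'go_peak_load': 15230.0}}
```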
POCloud/HTML Templates/Notes.html (new file)
@@ -0,0 +1,92 @@
```html
<div style='margin-top: 1em;' class='col-xs-12'>
  <div class="input-daterange input-group" id="datePicker">
    <input id="fromDate" data-daysofhistory="30" type="text" class="form-control" name="start">
    <span class="input-group-addon">to</span>
    <input class="form-control" id="toDate" type="text" name="end">
    <span class='input-group-btn'>
      <a href="" id="runPickerBtn" class="btn btn-theme">Run</a>
    </span>
  </div>
</div>
<div class='col-xs-12' style='margin-top: 2em;'>
  <table class="table">
    <thead>
      <th>Note ID</th>
      <th>Date & Time</th>
      <th>Author</th>
      <th>Type</th>
      <th>Note</th>
      <th>Stroke</th>
    </thead>

    <tbody id="output">
    </tbody>
  </table>
  <!--<%= JSON.stringify(channels['poc.events'].value) %>-->
</div>

<script>
    var nodeID = <%= node.nodeId %>;
    var nodeType = '<%= node.nodetypeName %>';
    var channelID = <%= channels['poc.notes'].channelId %>;
    console.log({nodeID:nodeID, nodeType:nodeType, channelID: channelID})

    var formatDate = function(str){
        var c1, c2;
        c1 = new Date(str);
        c2 = (c1.getTime() / 1000);
        c2 = Math.floor(c2);
        return c2.toString();
    };

    var updateTable = function(){
        var apiData, start, end;
        var $output = $('#output');
        start = $('#datePicker').find('#fromDate');
        dateString = start.val().replace(/-/g, "/");
        start = formatDate(dateString);
        end = $('#datePicker').find('#toDate');
        dateString = end.val().replace(/-/g, "/");
        end = formatDate(dateString);
        apiData = "&nodelist[0][nodeId]=" + nodeID.toString() + "&nodelist[0][channelId]=" + channelID.toString();
        apiData += "&start=" + start + "&end=" + end;
        $.ajax({
            url: "http://www.pocloud.io/api2/Nodechannels",
            data: apiData,
            dataType: "json",
            type: "GET",
            success: function(data) {
                var notes = data.listofstreams[0];
                for(var i = notes.stream.length-1; i >= 0; i--) {
                    var note = notes.stream[i];
                    var timestamp = note.x;
                    var jsonBlock = note.y;
                    var n = JSON.parse(jsonBlock);
                    var row = "<tr>";
                    row += "<td>" + n.id + "</td>";
                    row += "<td>" + n.date_time + "</td>";
                    row += "<td>" + n.author + "</td>";
                    row += "<td>" + n.ntype + "</td>";
                    row += "<td>" + n.note + "</td>";
                    row += "<td>" + n.stroke + "</td>";
                    row += "</tr>"
                    $output.append(row)
                    //$output.append(JSON.stringify(jsonBlock));
                }
            }
        });
    }

    $(document).on('click', '#runPickerBtn', function(e){
        e.preventDefault();
        updateTable();
    });
    $(document).ready(function(){
        updateTable();
    })
</script>
```
POCloud/HTML Templates/Well_Tests.html (new file)
@@ -0,0 +1,95 @@
```html
<div style='margin-top: 1em;' class='col-xs-12'>
  <div class="input-daterange input-group" id="datePicker">
    <input id="fromDate" data-daysofhistory="30" type="text" class="form-control" name="start">
    <span class="input-group-addon">to</span>
    <input class="form-control" id="toDate" type="text" name="end">
    <span class='input-group-btn'>
      <a href="" id="runPickerBtn" class="btn btn-theme">Run</a>
    </span>
  </div>
</div>
<div class='col-xs-12' style='margin-top: 2em;'>
  <table class="table">
    <thead>
      <th>Test ID</th>
      <th>Date & Time</th>
      <th>Oil (BBL)</th>
      <th>Water (BBL)</th>
      <th>Gas (MMCF)</th>
      <th>K-Factor</th>
      <th>Hours</th>
    </thead>

    <tbody id="output">
    </tbody>
  </table>
  <!--<%= JSON.stringify(channels['poc.events'].value) %>-->
</div>

<script>
    var nodeID = <%= node.nodeId %>;
    var nodeType = '<%= node.nodetypeName %>';
    var channelID = <%= channels['poc.welltests'].channelId %>;
    console.log({nodeID:nodeID, nodeType:nodeType, channelID: channelID})

    var formatDate = function(str){
        var c1, c2;
        c1 = new Date(str);
        c2 = (c1.getTime() / 1000);
        c2 = Math.floor(c2);
        return c2.toString();
    };

    var updateTable = function(){
        var apiData, start, end;
        var $output = $('#output');
        start = $('#datePicker').find('#fromDate');
        dateString = start.val().replace(/-/g, "/");
        start = formatDate(dateString);
        end = $('#datePicker').find('#toDate');
        dateString = end.val().replace(/-/g, "/");
        end = formatDate(dateString);
        apiData = "&nodelist[0][nodeId]=" + nodeID.toString() + "&nodelist[0][channelId]=" + channelID.toString();
        apiData += "&start=" + start + "&end=" + end;
        $.ajax({
            url: "http://www.pocloud.io/api2/Nodechannels",
            data: apiData,
            dataType: "json",
            type: "GET",
            success: function(data) {
                console.log(data);
                var notes = data.listofstreams[0];
                for(var i = notes.stream.length-1; i >= 0; i--) {
                    var note = notes.stream[i];
                    var timestamp = note.x;
                    var jsonBlock = note.y;
                    var n = JSON.parse(jsonBlock);
                    var row = "<tr>";
                    row += "<td>" + n.id + "</td>";
                    row += "<td>" + n.test_date + "</td>";
                    row += "<td>" + n.test_volume_oil + "</td>";
                    row += "<td>" + n.test_volume_water + "</td>";
                    row += "<td>" + n.test_volume_gas + "</td>";
                    row += "<td>" + n.k_factor + "</td>";
                    row += "<td>" + n.test_hours + "</td>";
                    row += "</tr>"
                    $output.append(row)
                    //$output.append(JSON.stringify(jsonBlock));
                }
            }
        });
    }

    $(document).on('click', '#runPickerBtn', function(e){
        e.preventDefault();
        updateTable();
    });
    $(document).ready(function(){
        updateTable();
    })
</script>
```
POCloud/HTML Templates/events.html (new file)
@@ -0,0 +1,96 @@
```html
<div style='margin-top: 1em;' class='col-xs-12'>
  <div class="input-daterange input-group" id="datePicker">
    <input id="fromDate" data-daysofhistory="30" type="text" class="form-control" name="start">
    <span class="input-group-addon">to</span>
    <input class="form-control" id="toDate" type="text" name="end">
    <span class='input-group-btn'>
      <a href="" id="runPickerBtn" class="btn btn-theme">Run</a>
    </span>
  </div>
</div>
<div class='col-xs-12' style='margin-top: 2em;'>
  <table class="table">
    <thead>
      <th>Event ID</th>
      <th>Date & Time</th>
      <th>Type</th>
      <th>Condition</th>
      <th>Tag</th>
      <th>Value</th>
      <th>Device Name</th>
      <th>Stroke Number</th>
    </thead>

    <tbody id="output">
    </tbody>
  </table>
  <!--<%= JSON.stringify(channels['poc.events'].value) %>-->
</div>

<script>
    var nodeID = <%= node.nodeId %>;
    var nodeType = '<%= node.nodetypeName %>';
    var channelID = <%= channels['poc.events'].channelId %>;
    console.log({nodeID:nodeID, nodeType:nodeType, channelID: channelID})

    var formatDate = function(str){
        var c1, c2;
        c1 = new Date(str);
        c2 = (c1.getTime() / 1000);
        c2 = Math.floor(c2);
        return c2.toString();
    };

    var updateTable = function(){
        var apiData, start, end;
        var $output = $('#output');
        start = $('#datePicker').find('#fromDate');
        dateString = start.val().replace(/-/g, "/");
        start = formatDate(dateString);
        end = $('#datePicker').find('#toDate');
        dateString = end.val().replace(/-/g, "/");
        end = formatDate(dateString);
        apiData = "&nodelist[0][nodeId]=" + nodeID.toString() + "&nodelist[0][channelId]=" + channelID.toString();
        apiData += "&start=" + start + "&end=" + end;
        $.ajax({
            url: "http://www.pocloud.io/api2/Nodechannels",
            data: apiData,
            dataType: "json",
            type: "GET",
            success: function(data) {
                var events = data.listofstreams[0];
                for(var i = events.stream.length-1; i >= 0; i--) {
                    var event = events.stream[i];
                    var timestamp = event.x;
                    var jsonBlock = event.y;
                    var ev = JSON.parse(jsonBlock);
                    var row = "<tr>";
                    row += "<td>" + ev.id + "</td>";
                    row += "<td>" + ev.datetime + "</td>";
                    row += "<td>" + ev.type + "</td>";
                    row += "<td>" + ev.cond + "</td>";
                    row += "<td>" + ev.tag + "</td>";
                    row += "<td>" + ev.value + "</td>";
                    row += "<td>" + ev.device_name + "</td>";
                    row += "<td>" + ev.stroke_number + "</td>";
                    row += "</tr>"
                    $output.append(row)
                    //$output.append(JSON.stringify(jsonBlock));
                }
            }
        });
    }

    $(document).on('click', '#runPickerBtn', function(e){
        e.preventDefault();
        updateTable();
    });
    $(document).ready(function(){
        updateTable();
    })
</script>
```
POCloud/HTML Templates/overview.html (new file)
@@ -0,0 +1,166 @@
```html
<div class="text-center"><h1>LATEST STROKE</h1></div>
<div class='row row-flex box-me'>
  <div class='col-xs-4 text-center'>
    <h2>Pump Fill</h2>
    <div class="gauge-box">
      <div data-labelheight="10" style="height: 170px; background: transparent; margin: 0 auto;" id="gauge-fillage_percent" data-chart="solidgauge" data-nodename="poc.fillage_percent" data-units="%" data-min="0" data-max="100" data-colors="0.1:#DF5353,0.5:#DDDF0D,0.9:#55BF3B" data-valuefontsize="18px" data-decimalPlaces="2"></div>
      <span data-timeupdate="fillage_percent"><%= channels["poc.fillage_percent"].timestamp %></span>
    </div>
  </div>
  <div class='col-xs-8'>
    <div style="height:300px" id="chart-fillage_percent" data-chart="chart" data-nodename1="poc.fillage_percent" data-datalabel1="Fill" data-daysofhistory="2" data-chartlabel="" data-ylabel="" data-xlabel="Date" data-units="%"></div>
  </div>
</div>

<div class='row row-flex box-me'>
  <div class='col-xs-4 text-center'>
    <h2>Fluid Level</h2>
    <div class="gauge-box">
      <div data-labelheight="10" style="height: 170px; background: transparent; margin: 0 auto;" id="gauge-fluid_above_pump" data-chart="solidgauge" data-nodename="poc.fluid_above_pump" data-units="ft." data-min="0" data-max="10000" data-colors="0.1:#DF5353,0.5:#DDDF0D,0.9:#55BF3B" data-valuefontsize="18px" data-decimalPlaces="2"></div>
      <span data-timeupdate="fluid_above_pump"><%= channels["poc.fluid_above_pump"].timestamp %></span>
    </div>
  </div>
  <div class='col-xs-8'>
    <div style="height:300px" id="chart-fluid_above_pump" data-chart="chart" data-nodename1="poc.fluid_above_pump" data-datalabel1="Fluid Level" data-daysofhistory="2" data-chartlabel="" data-ylabel="" data-xlabel="Date" data-units="ft."></div>
  </div>
</div>

<div class='row row-flex box-me'>
  <div class='col-xs-4 text-center'>
    <h2>Stroke Speed</h2>
    <div class="gauge-box">
      <div data-labelheight="10" style="height: 170px; background: transparent; margin: 0 auto;" id="gauge-SPM" data-chart="solidgauge" data-nodename="poc.SPM" data-units="SPM" data-min="0" data-max="12" data-colors="0.1:#DF5353,0.5:#DDDF0D,0.9:#55BF3B" data-valuefontsize="18px" data-decimalPlaces="2"></div>
      <span data-timeupdate="SPM"><%= channels["poc.SPM"].timestamp %></span>
    </div>
  </div>
  <div class='col-xs-8'>
    <div style="height:300px" id="chart-SPM" data-chart="chart" data-nodename1="poc.SPM" data-datalabel1="Speed" data-daysofhistory="2" data-chartlabel="" data-ylabel="" data-xlabel="Date" data-units="SPM"></div>
  </div>
</div>

<div class='row row-flex box-me'>
  <div class='col-xs-4 text-center'>
    <h2>Pump HP</h2>
    <div class="gauge-box">
      <div data-labelheight="10" style="height: 170px; background: transparent; margin: 0 auto;" id="gauge-pump_hp" data-chart="solidgauge" data-nodename="poc.pump_hp" data-units="HP" data-min="0" data-max="10" data-colors="0.1:#DF5353,0.5:#DDDF0D,0.9:#55BF3B" data-valuefontsize="18px" data-decimalPlaces="2"></div>
      <span data-timeupdate="pump_hp"><%= channels["poc.pump_hp"].timestamp %></span>
    </div>
  </div>
  <div class='col-xs-8'>
    <div style="height:300px" id="chart-pump_hp" data-chart="chart" data-nodename1="poc.pump_hp" data-datalabel1="HP" data-daysofhistory="2" data-chartlabel="" data-ylabel="" data-xlabel="Date" data-units="HP"></div>
  </div>
</div>

<div class='row row-flex box-me'>
  <div class='col-xs-4 text-center'>
    <h2>Fluid Load</h2>
    <div class="gauge-box">
      <div data-labelheight="10" style="height: 170px; background: transparent; margin: 0 auto;" id="gauge-downhole_fluid_load" data-chart="solidgauge" data-nodename="poc.downhole_fluid_load" data-units="lbs." data-min="0" data-max="10000" data-colors="0.1:#DF5353,0.5:#55BF3B,0.9:#DF5353" data-valuefontsize="18px" data-decimalPlaces="2"></div>
      <span data-timeupdate="pump_hp"><%= channels["poc.downhole_fluid_load"].timestamp %></span>
    </div>
  </div>
  <div class='col-xs-8'>
    <div style="height:300px" id="chart-downhole_fluid_load" data-chart="chart" data-nodename1="poc.downhole_fluid_load" data-datalabel1="Fluid Load" data-daysofhistory="2" data-chartlabel="" data-ylabel="" data-xlabel="Date" data-units="lbs."></div>
  </div>
</div>

<div class="text-center"><h1>TODAY'S TOTALS</h1></div>
<div class='row row-flex'>
  <div class='col-xs-4 text-center box-me'>
    <h2>Percent Run</h2>
    <div class="gauge-box">
      <div data-labelheight="10" style="height: 170px; background: transparent; margin: 0 auto;" id="gauge1" data-chart="solidgauge" data-nodename="poc.dt_percent_run" data-units="%" data-min="0" data-max="100" data-colors="0.1:#DF5353,0.5:#DDDF0D,0.9:#55BF3B" data-decimalPlaces="2"></div>
      <span data-timeupdate="dt_percent_run"><%= channels["poc.dt_percent_run"].timestamp %></span>
    </div>
  </div>
  <div class='col-xs-4 text-center box-me'>
    <h2>Average Speed</h2>
    <div class="gauge-box">
      <div data-labelheight="10" style="height: 170px; background: transparent; margin: 0 auto;" id="gauge2" data-chart="solidgauge" data-nodename="poc.dt_average_spm" data-units="SPM" data-min="0" data-max="20" data-colors="0.1:#DF5353,0.5:#DDDF0D,0.9:#55BF3B" data-decimalPlaces="2"></div>
      <span data-timeupdate="dt_average_spm"><%= channels["poc.dt_average_spm"].timestamp %></span>
    </div>
  </div>
  <div class='col-xs-4 text-center box-me'>
    <h2>Calculated Production</h2>
    <div class="gauge-box">
      <div data-labelheight="10" style="height: 170px; background: transparent; margin: 0 auto;" id="gauge3" data-chart="solidgauge" data-nodename="poc.dt_calculated_production" data-units="BBL" data-min="0" data-max="1000" data-colors="0.1:#DF5353,0.5:#DDDF0D,0.9:#55BF3B" data-decimalPlaces="2"></div>
      <span data-timeupdate="dt_calculated_production"><%= channels["poc.dt_calculated_production"].timestamp %></span>
    </div>
  </div>
  <div class='col-xs-4 text-center box-me'>
    <h2>Projected Production</h2>
    <div class="gauge-box">
      <div data-labelheight="10" style="height: 170px; background: transparent; margin: 0 auto;" id="gauge4" data-chart="solidgauge" data-nodename="poc.dt_projected_production" data-units="BBL" data-min="0" data-max="1000" data-colors="0.1:#DF5353,0.5:#DDDF0D,0.9:#55BF3B" data-decimalPlaces="2"></div>
      <span data-timeupdate="dt_projected_production"><%= channels["poc.dt_projected_production"].timestamp %></span>
    </div>
  </div>
  <div class='col-xs-4 text-center box-me'>
    <h2>Pump Intake Pressure</h2>
    <div class="gauge-box">
      <div data-labelheight="10" style="height: 170px; background: transparent; margin: 0 auto;" id="gauge5" data-chart="solidgauge" data-nodename="poc.dt_pump_intake_pressure" data-units="PSI" data-min="0" data-max="2000" data-colors="0.1:#DF5353,0.5:#DDDF0D,0.9:#55BF3B" data-decimalPlaces="2"></div>
      <span data-timeupdate="dt_pump_intake_pressure"><%= channels["poc.dt_pump_intake_pressure"].timestamp %></span>
    </div>
  </div>
  <div class='col-xs-4 text-center box-me'>
    <h2>Energy Consumed</h2>
    <div class="gauge-box">
      <div data-labelheight="10" style="height: 170px; background: transparent; margin: 0 auto;" id="gauge6" data-chart="solidgauge" data-nodename="poc.dt_kWh" data-units="kWh" data-min="0" data-max="200" data-colors="0.1:#DF5353,0.5:#DDDF0D,0.9:#55BF3B" data-decimalPlaces="2"></div>
      <span data-timeupdate="dt_kWh"><%= channels["poc.dt_kWh"].timestamp %></span>
    </div>
  </div>
</div>

<style>
  .box-me {
    position: relative;
    padding: 0.5em;
    padding-bottom: 1.5em;
    border: 1px solid #eee;
    /*margin: 1em 0;*/
  }
  .box-me .gauge-box {
    margin-top: -0.25em;
  }

  .pad15 {
    margin: 15px 15px;
  }

  .box-me h2 {
    text-transform: uppercase;
    font-size: 14px;
    color: #666;
    font-weight: 400;
    letter-spacing: 1px;
    z-index: 100;
  }
  .dynamic-chart-form {
    background-color: whiteSmoke;
    padding: 1em 0.5em;
    margin-top: 1em;
  }

  .row-flex {
    display: -webkit-box;
    display: -webkit-flex;
    display: -ms-flexbox;
    display: flex;
    flex-wrap: wrap;
  }
  .row-flex > [class*='col-'] {
    display: flex;
    flex-direction: column;
  }

  #systemStatusTimelineContainer h2 {
    text-transform: uppercase;
    font-size: 14px;
    color: #666;
    font-weight: 400;
    letter-spacing: 1px;
    z-index: 100;
  }
</style>
```
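The gauges and trend charts above are configured declaratively through data- attributes that the site's charting layer (not part of this diff) reads at render time. As an illustration only, a sketch of how a renderer might parse the data-colors stop list into (position, color) pairs:

```python
def parse_color_stops(spec):
    # "0.1:#DF5353,0.5:#DDDF0D,0.9:#55BF3B" -> [(0.1, '#DF5353'), ...]
    stops = []
    for part in spec.split(","):
        pos, color = part.split(":")
        stops.append((float(pos), color))
    return stops

print(parse_color_stops("0.1:#DF5353,0.5:#DDDF0D,0.9:#55BF3B"))
# [(0.1, '#DF5353'), (0.5, '#DDDF0D'), (0.9, '#55BF3B')]
```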
POCloud/HTML Templates/setup.html (new file)
@@ -0,0 +1,311 @@
```html
<div class="container">
  <div class="row">
    <div class="col-xs-6">
      <h1>Rod String & Pump</h1>
      <br />
      <div id="output">
        <canvas id="taperCanvas" width="300" height="500"></canvas>
        <!-- <%= channels["poc.well_setup"].value %> -->
      </div>
    </div>

    <div class="col-xs-5">
      <br />
      <ul class="nav nav-tabs" id="taperTabs" role="tablist">
      </ul>
      <table class="table tablestriped" >
        <thead>
          <tr><th>Measurement</th><th>Value</th></tr>
        </thead>
        <tbody id="data_table">
          <tr><td>Length</td><td><span id="tLength"></span> ft.</td></tr>
          <tr><td>Diameter</td><td><span id="tDiameter"></span> in.</td></tr>
          <tr><td>Material</td><td><span id="tMaterial"></span></td></tr>
          <tr><td>Damping Factor</td><td><span id="tDamping"></span></td></tr>
        </tbody>
      </table>
    </div>
  </div>

  <div class="row">
    <div class="col-xs-5">
      <h1>Motor Data</h1>
      <table class="table tablestriped" >
        <thead>
          <tr><th>Measurement</th><th>Value</th></tr>
        </thead>
        <tbody id="motor_data_table">
          <tr><td>Volts</td><td><span id="mVolts"></span> V</td></tr>
          <tr><td>Hertz</td><td><span id="mHertz"></span> Hz</td></tr>
          <tr><td>Poles</td><td><span id="mPoles"></span></td></tr>
          <tr><td>Amps</td><td><span id="mAmps"></span> A</td></tr>
          <tr><td>Horsepower</td><td><span id="mHorsepower"></span> HP</td></tr>
          <tr><td>Service Factor</td><td><span id="mServiceFactor"></span></td></tr>
          <tr><td>RPM</td><td><span id="mRPM"></span> RPM</td></tr>
          <tr><td>Motor Sheave</td><td><span id="mMotorSheave"></span> in.</td></tr>
          <tr><td>Gearbox Rating</td><td><span id="mGbxRating"></span> x 1000 in-lbs</td></tr>
          <tr><td>Gearbox Ratio</td><td><span id="mGbxRatio"></span></td></tr>
          <tr><td>Gearbox Limit</td><td><span id="mGbxLimit"></span></td></tr>
          <tr><td>Gearbox Sheave</td><td><span id="mGbxSheave"></span> in.</td></tr>
          <tr><td>Max Frequency</td><td><span id="mMaxFreq"></span> Hz</td></tr>
          <tr><td>Min RPM</td><td><span id="mMinRPM"></span> RPM</td></tr>
          <tr><td>Max RPM</td><td><span id="mMaxRPM"></span> RPM</td></tr>
        </tbody>
      </table>
    </div>

    <div class="col-xs-5 col-xs-offset-1">
      <h1>Well Parameters</h1>
      <table class="table tablestriped" >
        <thead>
          <tr><th>Measurement</th><th>Value</th></tr>
        </thead>
        <tbody id="well_data_table">
          <tr><td>API Gravity Oil</td><td><span id="wAPIOil"></span></td></tr>
          <tr><td>Specific Gravity Water</td><td><span id="wSGWater"></span></td></tr>
          <tr><td>Young's Modulus (Steel)</td><td><span id="wYMSteel"></span> x 10^6</td></tr>
          <tr><td>Young's Modulus (Fiberglass)</td><td><span id="wYMFiberglass"></span> x 10^6</td></tr>
          <tr><td>Water Cut</td><td><span id="wWaterCut"></span> %</td></tr>
          <tr><td>Casing ID</td><td><span id="wCasingID"></span> in.</td></tr>
          <tr><td>Tubing OD</td><td><span id="wTubingOD"></span> in.</td></tr>
          <tr><td>Tubing ID</td><td><span id="wTubingID"></span> in.</td></tr>
          <tr><td>Tubing Anchor Depth</td><td><span id="wAnchorDepth"></span> ft.</td></tr>
          <tr><td>Pump Diameter</td><td><span id="wPumpDiameter"></span> in.</td></tr>
          <tr><td>Pump Constant</td><td><span id="wPumpConstant"></span></td></tr>
          <tr><td>Structural Rating</td><td><span id="wStructuralRating"></span> x 100 lbs.</td></tr>
          <tr><td>Motor Control Mode</td><td><span id="wMotorControlMode"></span></td></tr>
          <tr><td>Total Vertical Depth</td><td><span id="wTotalVerticalDepth"></span> ft.</td></tr>
          <tr><td>Well Type</td><td><span id="wWellType"></span></td></tr>
          <tr><td>Surface Stroke Length</td><td><span id="wSurfaceStrokeLength"></span> in.</td></tr>
        </tbody>
      </table>
    </div>
  </div>
</div>
<!-- {"backupData":{
  "Youngs_Modulus_Steel":"30.5",
  "unitConfig":{
    "SG_Water":"1.25",
    "API_Oil":"1.1",
    "Gearbox_Sheave":"35.75",
    "Percent_Water":"91.667",
    "Total_Vertical_Depth_Input":"12000.0",
    "Tubing_Size_ID":"1.995",
    "Anchor_Depth":"8923.0",
    "motorNameplate":{
      "Volts":"480.0",
      "Hertz":"60.0",
      "Poles":"6",
      "Amps":"52.0",
      "Horsepower":"40.0",
      "ServiceFactor":"1.15",
      "RPM":"1100.0"
    },
    "RPM_Minimum":"0.0",
    "Total_Vertical_Depth":"10395.0",
    "Tubing_Size_OD":"2.375",
    "Pump_Diameter":"1.25",
    "Motor_Sheave_Size":"4.75",
    "Rating_Gearbox":"320.0",
    "Gearbox_Limit":"100.0",
    "Rating_Structural":"305.0",
    "Speed_Torque_Mode":"3",
    "Pump_Constant":"0.0",
    "Gearbox_Ratio":"28.7",
    "Well_Type":"0",
    "MaxFreq":"155.0",
    "RPM_Maximum":"2500.0",
    "MotorCntrlMode":"3",
    "Total_Stroke_Length":"99.0"
  },
  "Youngs_Modulus_Fiberglass":"7.2",
  "Casing_ID":"4.892",
  "taper":[
    {"setup":{"Diameter":"0.0","c":"0.0","Material":"0","Length":"0.0","RodCount":"0","UseRodCount":"0"}},
    {"setup":{"Diameter":"0.75","c":"0.08","Material":"1","Length":"10095.0","RodCount":"0","UseRodCount":"0"}},
    {"setup":{"Diameter":"1.5","c":"0.08","Material":"1","Length":"300.0","RodCount":"0","UseRodCount":"0"}},
    {"setup":{"Diameter":"1.5","c":"0.08","Material":"1","Length":"0.0","RodCount":"0","UseRodCount":"0"}},
    {"setup":{"Diameter":"0.75","c":"0.0","Material":"1","Length":"0.0","RodCount":"0","UseRodCount":"0"}},
    {"setup":{"Diameter":"1.5","c":"0.0","Material":"1","Length":"0.0","RodCount":"0","UseRodCount":"0"}},
    {"setup":{"Diameter":"1.5","c":"0.0","Material":"1","Length":"0.0","RodCount":"0","UseRodCount":"0"}},
    {"setup":{"Diameter":"0.0","c":"0.0","Material":"1","Length":"0.0","RodCount":"0","UseRodCount":"0"}},
    {"setup":{"Diameter":"0.0","c":"0.0","Material":"1","Length":"0.0","RodCount":"0","UseRodCount":"0"}},
    {"setup":{"Diameter":"0.0","c":"0.0","Material":"1","Length":"0.0","RodCount":"0","UseRodCount":"0"}},
    {"setup":{"Diameter":"0.0","c":"0.0","Material":"1","Length":"0.0","RodCount":"0","UseRodCount":"0"}}
  ]}} -->

<script>
    var raw = <%= channels["poc.well_setup"].value %>;
    // var parsed = JSON.stringify(raw);
    var backupData = raw.backupData;
    var taperData = backupData.taper;
    var unitConfig = backupData.unitConfig;

    var taper_sel = 1;

    var taperLength = [];
    var taperWidth = [];
    var rodDepth = [];
    var taperDepth = [];

    var context = document.getElementById("taperCanvas").getContext("2d");
    var canvas_width = $('#taperCanvas').width();
    var canvas_height = $('#taperCanvas').height();
    var pump_height = 100;
    var taper_height = canvas_height - pump_height;
    var max_width = canvas_width * 0.5;
    var total_depth = parseFloat(unitConfig.Total_Vertical_Depth);
    var currentDepth = 0;
    for(var i = 0; i< taperData.length; i++){
        if (taperData[i].setup.Length > 0.0){
            $('#taperTabs').append('<li class="t_tab" id="taper'+i+'" taper='+i+'><a href="#" style="color:#337AB7">'+i+'</a></li>');
            taperLength.push((taperData[i].setup.Length / total_depth) * taper_height);
            taperWidth.push(taperData[i].setup.Diameter * (max_width / 2.5));
            rodDepth.push(currentDepth);
            taperDepth.push((currentDepth /total_depth) * taper_height);
            currentDepth = currentDepth + parseFloat(taperData[i].setup.Length);
        }
    }
    console.log({rodDepth:rodDepth});
    console.log({taperDepth:taperDepth});
    var drawTapers = function(active){
        var horiz_space = 0;
        var gradient=context.createLinearGradient((canvas_width - max_width) / 2,0,((canvas_width - max_width) / 2)+max_width,0);
        gradient.addColorStop(0,"black");
        gradient.addColorStop(0.5,"white");
        gradient.addColorStop(1,"black");
        context.fillStyle=gradient;
        context.lineWidth = 2;

        for (i=0;i<taperLength.length; i++){
            horiz_space = (canvas_width - taperWidth[i]) / 2;
            context.beginPath();
            context.moveTo(horiz_space, taperDepth[i]);
            context.lineTo(horiz_space + taperWidth[i], taperDepth[i]);
            context.lineTo(horiz_space + taperWidth[i], taperDepth[i]+ taperLength[i]);
            context.lineTo(horiz_space, taperDepth[i] + taperLength[i]);
            context.lineTo(horiz_space, taperDepth[i]);

            console.log(active);
            if (i+1 == active){
                context.strokeStyle = 'red';
            } else {
                context.strokeStyle = 'black';
            }
            // context.strokeStyle = 'black';

            context.fill();
            context.stroke()
            context.closePath();
        }
        var pump_width = unitConfig.Pump_Diameter * (max_width/2.5);
        horiz_space = (canvas_width - pump_width) / 2;
        if (active =="Unit"){
            context.strokeStyle = 'red';
        } else {
            context.strokeStyle = 'black';
        }
        // context.strokeStyle = 'blue';
        context.beginPath();
        context.moveTo(horiz_space, taper_height);
        context.lineTo(horiz_space + pump_width, taper_height);
        context.lineTo(horiz_space + pump_width, taper_height + pump_height);
        context.lineTo(horiz_space, taper_height + pump_height);
        context.lineTo(horiz_space, taper_height);
        context.fill();
        context.stroke()
        context.closePath();
    }
    drawTapers(1);

    var updateTable = function(taper_selected){
        $('#data_table').html('<tr><td>Length</td><td><span id="tLength"></span> ft.</td></tr><tr><td>Diameter</td><td><span id="tDiameter"></span> in.</td></tr><tr><td>Material</td><td><span id="tMaterial"></span></td></tr><tr><td>Damping Factor</td><td><span id="tDamping"></span></td></tr>');
        var material;
        $('#tLength').text(taperData[taper_selected].setup.Length);
        $('#tDiameter').text(taperData[taper_selected].setup.Diameter);
        if (parseInt(taperData[taper_selected].setup.Material) == 1){
            material = "Steel";
        } else if (parseInt(taperData[taper_selected].setup.Material) == 2){
            material = "Fiberglass";
        }
        $('#tMaterial').text(material);
        $('#tDamping').text(taperData[taper_selected].setup.c);
    }

    updateTable(1);

    $('#taper1').addClass('active');

    $(".t_tab").click(function(){
        $("#taperTabs>.active").removeClass('active');
        $(this).addClass('active');
        taper_sel = $(this).attr("taper");
        if (taper_sel != "Unit"){
            updateTable(taper_sel);
        } else {
            $("#data_table").html("<tr><td>Pump Diameter</td><td>"+ taperData["Unit"].Pump_Diameter+ " in.</td></tr>");
        }
        drawTapers(taper_sel);
    })

    $('#mVolts').text(unitConfig.motorNameplate.Volts);
    $('#mHertz').text(unitConfig.motorNameplate.Hertz);
    $('#mPoles').text(unitConfig.motorNameplate.Poles);
    $('#mAmps').text(unitConfig.motorNameplate.Amps);
    $('#mHorsepower').text(unitConfig.motorNameplate.Horsepower);
    $('#mServiceFactor').text(unitConfig.motorNameplate.ServiceFactor);
    $('#mRPM').text(unitConfig.motorNameplate.RPM);

    $('#mMotorSheave').text(unitConfig.Motor_Sheave_Size);
    $('#mGbxRating').text(unitConfig.Rating_Gearbox);
    $('#mGbxRatio').text(unitConfig.Gearbox_Ratio);
    $('#mGbxLimit').text(unitConfig.Gearbox_Limit);
    $('#mGbxSheave').text(unitConfig.Gearbox_Sheave);
    $('#mMaxFreq').text(unitConfig.MaxFreq);
    $('#mMinRPM').text(unitConfig.RPM_Minimum);
    $('#mMaxRPM').text(unitConfig.RPM_Maximum);

    $('#wAPIOil').text(unitConfig.API_Oil);
    $('#wSGWater').text(unitConfig.SG_Water);
    $('#wYMSteel').text(backupData.Youngs_Modulus_Steel);
    $('#wYMFiberglass').text(backupData.Youngs_Modulus_Fiberglass);
    $('#wWaterCut').text(unitConfig.Percent_Water);
    $('#wTubingOD').text(unitConfig.Tubing_Size_OD);
    $('#wTubingID').text(unitConfig.Tubing_Size_ID);
    $('#wCasingID').text(backupData.Casing_ID);
    $('#wAnchorDepth').text(unitConfig.Anchor_Depth);
    $('#wPumpDiameter').text(unitConfig.Pump_Diameter);
    $('#wPumpConstant').text(unitConfig.Pump_Constant);
    $('#wStructuralRating').text(unitConfig.Rating_Structural);

    if (parseInt(unitConfig.MotorCntrlMode) == 3){
        $('#wMotorControlMode').text("Torque");
    } else if (parseInt(unitConfig.MotorCntrlMode) == 1) {
        $('#wMotorControlMode').text("Speed");
    }

    $('#wTotalVerticalDepth').text(unitConfig.Total_Vertical_Depth);

    if (parseInt(unitConfig.Well_Type) == 0){
        $('#wWellType').text("Vertical");
    } else if (parseInt(unitConfig.Well_Type) == 1) {
        $('#wWellType').text("Directional");
    }

    $('#wSurfaceStrokeLength').text(unitConfig.Total_Stroke_Length);
</script>
```
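The canvas code above scales each taper's length against Total_Vertical_Depth and its diameter against half the canvas width, with a 2.5 in. rod spanning that full half-width. The same geometry in a short Python sketch, fed with the sample taper from the comment block above:

```python
def taper_geometry(tapers, total_depth, canvas_w, canvas_h, pump_height=100):
    # Mirrors the drawTapers math: y/h scale to the drawable height,
    # w scales so a 2.5 in. diameter fills half the canvas width.
    taper_height = canvas_h - pump_height
    max_width = canvas_w * 0.5
    depth, segments = 0.0, []
    for t in tapers:
        length = float(t["setup"]["Length"])
        if length > 0.0:
            segments.append({
                "y": depth / total_depth * taper_height,
                "h": length / total_depth * taper_height,
                "w": float(t["setup"]["Diameter"]) * (max_width / 2.5),
            })
            depth += length
    return segments

# 10095 ft of 0.75 in. rod, then 300 ft of 1.5 in. rod, in a 10395 ft well
# drawn on a 300x500 canvas:
print(taper_geometry(
    [{"setup": {"Length": "10095.0", "Diameter": "0.75"}},
     {"setup": {"Length": "300.0", "Diameter": "1.5"}}],
    10395.0, 300, 500))
```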
POCloud/config.txt (new file)
@@ -0,0 +1,12 @@
```json
{
    "driverFileName": "poc.py",
    "deviceName": "poc",
    "driverId": "0050",
    "releaseVersion": "5",
    "files": {
        "file1": "poc.py",
        "file2": "modbusMap.p"
    }
}
```
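A minimal sketch of consuming this driver manifest, assuming the host side simply loads it as JSON (the loader itself is not part of this diff):

```python
import json

# Assumed path; the manifest sits at the POCloud package root in this diff.
with open("POCloud/config.txt") as f:
    manifest = json.load(f)

print(manifest["driverFileName"])           # poc.py
print(sorted(manifest["files"].values()))   # ['modbusMap.p', 'poc.py']
```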
385
POCloud/direct/poc.py
Normal file
@@ -0,0 +1,385 @@
|
||||
#!/usr/bin/python
|
||||
|
||||
|
||||
import threading
|
||||
import time
|
||||
import pickle
|
||||
from device_base import deviceBase
|
||||
import traceback
|
||||
from pycomm.ab_comm.clx import Driver as ClxDriver
|
||||
from collections import deque
|
||||
|
||||
data_source = "PLC"
|
||||
plc_ip = '192.168.1.10'
|
||||
|
||||
|
||||
def readTag(addr, tag):
|
||||
c = ClxDriver()
|
||||
if c.open(addr):
|
||||
try:
|
||||
v = c.read_tag(tag)
|
||||
# print(v)
|
||||
return v
|
||||
except Exception:
|
||||
err = c.get_status()
|
||||
c.close()
|
||||
print err
|
||||
pass
|
||||
c.close()
|
||||
|
||||
|
||||
def readArray(addr, tag, start, end):
|
||||
c = ClxDriver()
|
||||
if c.open(addr):
|
||||
arr_vals = []
|
||||
try:
|
||||
for i in range(start, end):
|
||||
tag_w_index = tag + "[{}]".format(i)
|
||||
v = c.read_tag(tag_w_index)
|
||||
# print('{} - {}'.format(tag_w_index, v))
|
||||
arr_vals.append(round(v[0], 4))
|
||||
# print(v)
|
||||
if len(arr_vals) > 0:
|
||||
return arr_vals
|
||||
else:
|
||||
return False
|
||||
print("No length for {}".format(addr))
|
||||
except Exception:
|
||||
err = c.get_status()
|
||||
c.close()
|
||||
print err
|
||||
pass
|
||||
c.close()
|
||||
|
||||
|
||||
def writeTag(addr, tag, val):
|
||||
c = ClxDriver()
|
||||
if c.open(addr):
|
||||
try:
|
||||
# typ = getTagType(addr, tag)
|
||||
cv = c.read_tag(tag)
|
||||
wt = c.write_tag(tag, val, cv[1])
|
||||
# print(wt)
|
||||
return wt
|
||||
except Exception:
|
||||
err = c.get_status()
|
||||
c.close()
|
||||
print err
|
||||
pass
|
||||
c.close()
|
||||
|
||||
|
||||
class Channel():
|
||||
global plc_ip
|
||||
|
||||
def __init__(self, mesh_name, plc_tag, data_type, chg_threshold, guarantee_sec):
|
||||
self.mesh_name = mesh_name
|
||||
self.plc_tag = plc_tag
|
||||
self.data_type = data_type
|
||||
self.last_value = None
|
||||
self.value = None
|
||||
self.last_send_time = 0
|
||||
self.chg_threshold = chg_threshold
|
||||
self.guarantee_sec = guarantee_sec
|
||||
|
||||
def read(self, forceSend):
|
||||
if self.plc_tag:
|
||||
v = readTag(plc_ip, self.plc_tag)
|
||||
if v:
|
||||
if self.data_type == 'boolean' or self.data_type == 'str':
|
||||
if (self.last_send_time == 0) or (self.value is None) or not (self.value == v[0]) or ((time.time() - self.last_send_time) > self.guarantee_sec) or (forceSend):
|
||||
self.last_value = self.value
|
||||
self.value = v[0]
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
else:
|
||||
if (self.last_send_time == 0) or (self.value is None) or (abs(self.value - v[0]) > self.chg_threshold) or ((time.time() - self.last_send_time) > self.guarantee_sec) or (forceSend):
|
||||
self.last_value = self.value
|
||||
self.value = v[0]
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
else:
|
||||
return False
|
||||
return False
|
||||
|
||||
go_channels = {
|
||||
"percent_run": Channel('go_percent_run', 'GAUGEOFF_Percent_Run', 'float', 0, 0),
|
||||
"kWh": Channel('go_kwh', 'GAUGEOFF_kWh', 'float', 0, 0),
|
||||
'kWh_regen': Channel('go_kwh_regen', 'GAUGEOFF_kWh_regen', 'float', 0, 0),
|
||||
"electricity_cost": Channel('go_electricity_cost', 'GAUGEOFF_Electricity_Cost', 'float', 0, 0),
|
||||
'peak_load': Channel('go_peak_load', 'GAUGEOFF_Max_Load', 'float', 0, 0),
|
||||
'min_load': Channel('go_min_load', 'GAUGEOFF_Min_Load', 'float', 0, 0),
|
||||
'polished_rod_HP': Channel('go_polished_rod_hp', 'GAUGEOFF_Polished_Rod_HP', 'float', 0, 0),
|
||||
'average_SPM': Channel('go_average_spm', "GAUGEOFF_Average_SPM", 'float', 0, 0),
|
||||
'lifting_cost': Channel('go_lifting_cost', "GAUGEOFF_Lifting_Cost", 'float', 0, 0),
|
||||
'full_card_production': Channel('go_full_card_production', "GAUGEOFF_Full_Card_Production", 'float', 0, 0),
|
||||
'fluid_above_pump': Channel('go_fluid_above_pump', 'GAUGEOFF_Fluid_Above_Pump', 'float', 0, 0),
|
||||
'production_calculated': Channel('go_production_calculated', 'GAUGEOFF_Production_Calculated', 'float', 0, 0),
|
||||
'inflow_rate': Channel('go_inflow_rate', 'GAUGEOFF_Inflow_Rate', 'float', 0, 0),
|
||||
'pump_intake_pressure': Channel('go_pump_intake_pressure', 'GAUGEOFF_pump_intake_pressure', 'float', 0, 0)
|
||||
}

statusCh = Channel('status', 'Pump.Run_Status', 'str', 0, 0)
gaugeOffCh = Channel('go', 'Gauge_Off_Command', 'boolean', 0, 0)

channels = {
    'downhole_adjusted_gross_stroke': Channel('downhole_adjusted_gross_stroke', 'Card_Past[1].Downhole_AdjustedGrossStroke', 'float', 5.0, 3600),
    'downhole_fluid_load': Channel('downhole_fluid_load', 'Card_Past[1].Downhole_FluidLoad', 'float', 100.0, 3600),
    'downhole_gross_stroke': Channel('downhole_gross_stroke', 'Card_Past[1].Downhole_GrossStroke', 'float', 1.0, 3600),
    'downhole_max_position': Channel('downhole_max_position', 'Card_Past[1].Downhole_Max_Position.Position', 'float', 1.0, 3600),
    'downhole_min_position': Channel('downhole_min_position', 'Card_Past[1].Downhole_Min_Position.Position', 'float', 1.0, 3600),
    'downhole_net_stroke': Channel('downhole_net_stroke', 'Card_Past[1].Downhole_NetStroke', 'float', 1.0, 3600),
    'fillage_percent': Channel('fillage_percent', 'Card_Past[1].Fillage_Percent', 'float', 1.0, 3600),
    'fluid_above_pump': Channel('fluid_above_pump', 'Card_Past[1].Fluid_Above_Pump', 'float', 10.0, 3600),
    'fluid_gradient': Channel('fluid_gradient', 'Card_Past[1].Params.Fluid_Gradient', 'float', 0, 3600),
    'polished_rod_hp': Channel('polished_rod_hp', 'Card_Past[1].Polished_Rod_HP', 'float', 0.5, 3600),
    'pump_hp': Channel('pump_hp', 'Card_Past[1].Pump_HP', 'float', 0.5, 3600),
    'pump_intake_pressure': Channel('pump_intake_pressure', 'Card_Past[1].Pump_Intake_Pressure', 'float', 10.0, 3600),
    'stroke_production': Channel('stroke_production', 'Stroke_Production', 'float', 0.0005, 3600),
    'surface_max_load': Channel('surface_max_load', 'Card_Past[1].Surface_Max.Load', 'float', 100.0, 3600),
    'surface_min_load': Channel('surface_min_load', 'Card_Past[1].Surface_Min.Load', 'float', 100.0, 3600),
    'surface_stroke_length': Channel('surface_stroke_length', 'Card_Past[1].Surface_StrokeLength', 'float', 1.0, 3600),
    'tubing_movement': Channel('tubing_movement', 'Card_Past[1].Tubing_Movement', 'float', 1.0, 3600),
    'SPM': Channel('SPM', 'Card_Past[1].SPM', 'float', 0.5, 3600),
    # 'drive_torque_mode': Channel('drive_torque_mode', 'DriveTorqueMode', 'boolean', 0, 3600),
    'dt': Channel('dt', 'Card_Past[1].Params.dt', 'float', 0.001, 3600),
    # 'speed_reference': Channel('speed_reference', 'Pump_PF755.PSet_SpeedRef', 'float', 5.0, 3600),
    'stuffing_box_friction': Channel('stuffing_box_friction', 'Card_Past[1].Params.Stuffing_Box_Friction', 'float', 1.0, 3600),
    # 'torque_reference': Channel('torque_reference', 'PF755_Drive:O.TrqRefAStpt', 'float', 1.0, 3600),
    'tubing_head_pressure': Channel('tubing_head_pressure', 'Card_Past[1].Params.Tubing_Head_Pressure', 'float', 5.0, 3600),
}

dt_channels = {  # Current Daily Totals
    'Average_SPM': Channel('dt_average_spm', 'TODAY_Average_SPM', 'float', 0.5, 3600),
    'Calculated_Production': Channel('dt_calculated_production', 'TODAY_Production_Calculated', 'float', 10.0, 3600),
    'Downhole_Net_Stroke': Channel('dt_downhole_net_stroke', 'TODAY_Downhole_NetStroke', 'float', 1.0, 3600),
    'Electricity_Cost': Channel('dt_electricity_cost', 'TODAY_Electricity_Cost', 'float', 0.1, 3600),
    'Fluid_Level': Channel('dt_fluid_level', 'TODAY_Fluid_Above_Pump', 'float', 10.0, 3600),
    'Full_Card_Production': Channel('dt_full_card_production', 'TODAY_Full_Card_Production', 'float', 10.0, 3600),
    'Inflow_Rate': Channel('dt_inflow_rate', 'TODAY_Inflow_Rate', 'float', 10.0, 3600),
    'Lifting_Cost': Channel('dt_lifting_cost', 'TODAY_Lifting_Cost', 'float', 0.01, 3600),
    'Min_Load': Channel('dt_min_load', 'TODAY_Min_Load', 'float', 100.0, 3600),
    'Peak_Load': Channel('dt_peak_load', 'TODAY_Max_Load', 'float', 100.0, 3600),
    'Percent_Run': Channel('dt_percent_run', 'TODAY_Percent_Run', 'float', 1.0, 3600),
    'Polished_Rod_HP': Channel('dt_polished_rod_hp', 'TODAY_Polished_Rod_HP', 'float', 1.0, 3600),
    'Projected_Production': Channel('dt_projected_production', 'TODAY_Production_Projected', 'float', 5.0, 3600),
    'Pump_HP': Channel('dt_pump_hp', 'TODAY_Pump_HP', 'float', 1.0, 3600),
    'Pump_Intake_Pressure': Channel('dt_pump_intake_pressure', 'TODAY_Pump_Intake_Pressure', 'float', 10.0, 3600),
    'Surface_Stroke_Length': Channel('dt_surface_stroke_length', 'TODAY_Surface_StrokeLength', 'float', 1.0, 3600),
    'Tubing_Movement': Channel('dt_tubing_movement', 'TODAY_Tubing_Movement', 'float', 1.00, 3600),
    'kWh': Channel('dt_kWh', 'TODAY_kWh', 'float', 10.0, 3600),
    'kWh_Regen': Channel('dt_kWh_regen', 'TODAY_kWh_Regen', 'float', 1.0, 3600)
}


class Card():
    global plc_ip

    def __init__(self, unified_time):
        self.sc = []
        self.dc = []
        self.sent = False
        self.read_time = unified_time
        self.readCard()

    def readCard(self):
        self.card_id = readTag(plc_ip, "Card_Past[1].ID")[0]
        self.num_points = int(readTag(plc_ip, "Card_Past[1].Num_Points")[0])
        print("reading {} points from card ID {}".format(self.num_points, self.card_id))

        if self.num_points > 1:
            surf_pos = readArray(plc_ip, 'Card_Past[1].Surface_Position', 1, self.num_points)
            surf_lod = readArray(plc_ip, 'Card_Past[1].Surface_Load', 1, self.num_points)
            down_pos = readArray(plc_ip, 'Card_Past[1].Downhole_Position', 1, self.num_points)
            down_lod = readArray(plc_ip, 'Card_Past[1].Downhole_Load', 1, self.num_points)
            if surf_pos and surf_lod and down_pos and down_lod:
                for i in range(0, self.num_points-1):
                    if not (surf_pos[i] == 0.0) and not (surf_lod[i] == 0.0):
                        self.sc.append([surf_pos[i], surf_lod[i]])
                    if not (down_pos[i] == 0.0) and not (down_lod[i] == 0.0):
                        self.dc.append([down_pos[i], down_lod[i]])
                return True
            else:
                print("couldn't get a full set of position/load pairs")
                return False

    def stringify(self):
        ''' returns a list of two strings [surface card, downhole card]'''
        sc_str = "["
        dc_str = "["
        for i in range(0, len(self.sc)):
            sc_str = sc_str + "[{},{}],".format(self.sc[i][0], self.sc[i][1])
        sc_str = sc_str + "[{},{}]]".format(self.sc[0][0], self.sc[0][1])
        for j in range(0, len(self.dc)):
            dc_str = dc_str + "[{},{}],".format(self.dc[j][0], self.dc[j][1])
        dc_str = dc_str + "[{},{}]]".format(self.dc[0][0], self.dc[0][1])
        return [sc_str, dc_str]
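    # stringify() re-appends the first point so each card string is a closed
    # polygon. For a surface card of two hypothetical points
    # [[0.0, 5000.0], [10.0, 5200.0]] it returns:
    #
    #   "[[0.0,5000.0],[10.0,5200.0],[0.0,5000.0]]"
    #
    # Note that self.sc[0] / self.dc[0] are indexed unconditionally, so this
    # assumes readCard() collected at least one point per card.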

class start(threading.Thread, deviceBase):

    def __init__(self, name=None, number=None, mac=None, Q=None, mcu=None, companyId=None, offset=None, mqtt=None, Nodes=None):
        threading.Thread.__init__(self)
        deviceBase.__init__(self, name=name, number=number, mac=mac, Q=Q, mcu=mcu, companyId=companyId, offset=offset, mqtt=mqtt, Nodes=Nodes)

        self.daemon = True
        self.forceSend = False
        self.version = "3"
        self.device_address = "http://192.168.1.30/"
        # self.device_address = "http://localhost/"
        self.cardLoopTimer = 600
        self.finished = threading.Event()
        threading.Thread.start(self)
        self.statusChanged = False
        self.al_status_last = False
        self.dl_status_last = False
        self.card_storage = deque([])  # array of the last x cards
        self.card_storage_limit = 5
        self.last_card_sent_time = 0

        # load stored event ID's
        try:
            with open('eventIds.p', 'rb') as handle:
                self.eventIds = pickle.load(handle)

            print "found pickled eventID dictionary: {0}".format(self.eventIds)
        except:
            print "couldn't load event ID's from pickle"
            self.eventIds = []

        # load stored wellconfig's
        try:
            with open('wellSetup.p', 'rb') as handle:
                self.wellSetup = pickle.load(handle)

            print "Found pickled Well Setup (but it's going to be too long to print)"
            # print self.wellConfig
        except:
            print "couldn't load Well Setup from pickle"
            self.wellSetup = []

        self.sendtodbJSON("device_address", self.device_address, 0)

    # this is a required function for all drivers; its goal is to upload some piece of data
    # about your device so it can be seen on the web
    def register(self):
        # `channels` holds Channel objects and has no "status" key, so reset
        # the dedicated status Channel rather than indexing the dict
        statusCh.last_value = ""

    def channelCheck(self, c, force):
        if c.read(force):
            self.sendtodbJSON(c.mesh_name, c.value, time.time())
            c.last_send_time = time.time()
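    # channelCheck() couples a Channel's deadband test to the upload path:
    # a value is pushed only when read() reports it as send-worthy, and the
    # send timestamp is stamped back onto the Channel. Minimal sketch of how
    # the run loop below drives it (same calls, hypothetical moment in time):
    #
    #   for dt in dt_channels:
    #       self.channelCheck(dt_channels[dt], self.forceSend)
    #   # -> sendtodbJSON('dt_average_spm', <value>, <timestamp>) only fires
    #   #    for channels whose readings cleared their thresholds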

    def run(self):
        runLoopStatus = ""  # local, so the except handler can always reference it
        while True:
            if self.forceSend:
                print "FORCE SEND: TRUE"
            try:
                self.statusChanged = False
                runLoopStatus = "checkStatus"
                if self.checkStatus():
                    self.statusChanged = True
                    # TODO Add event logic here

                runLoopStatus = "Daily Total Loop"
                for dt in dt_channels:
                    self.channelCheck(dt_channels[dt], self.forceSend)

                runLoopStatus = "checkGaugeOffData"
                gaugeOffCh.read(False)
                go = (gaugeOffCh.value == 1)
                if go:
                    print("Gauge Off!")
                for go in go_channels:
                    self.channelCheck(go_channels[go], self.forceSend)

                runLoopStatus = "Stroke Parameter Data"
                for ch in channels:
                    self.channelCheck(channels[ch], self.forceSend)

                runLoopStatus = "Reading Cards"
                if len(self.card_storage) > 0:
                    if not readTag(plc_ip, "Card_Past[1].ID")[0] == self.card_storage[0].card_id:
                        current_time = time.time()
                        current_card = Card(current_time)
                        self.sendtodbJSON("card_history", current_card.card_id, current_time)
                        if (current_card.read_time - self.last_card_sent_time) > self.cardLoopTimer or self.forceSend:
                            cards = current_card.stringify()
                            self.sendtodbJSON("sc", cards[0], current_time)
                            self.sendtodbJSON("dc", cards[1], current_time)
                            self.last_card_sent_time = time.time()
                            current_card.sent = True
                        self.card_storage.appendleft(current_card)
                        while len(self.card_storage) > self.card_storage_limit:
                            self.card_storage.pop()
                    if self.statusChanged:
                        for c in self.card_storage:
                            if not c.sent:
                                cstr = c.stringify()
                                self.sendtodbJSON("sc", cstr[0], c.read_time)
                                self.sendtodbJSON("dc", cstr[1], c.read_time)
                                self.last_card_sent_time = time.time()
                else:
                    current_time = time.time()
                    current_card = Card(current_time)
                    self.sendtodbJSON("card_history", current_card.card_id, current_time)
                    if (current_card.read_time - self.last_card_sent_time) > self.cardLoopTimer or self.forceSend:
                        cards = current_card.stringify()
                        self.sendtodbJSON("sc", cards[0], current_time)
                        self.sendtodbJSON("dc", cards[1], current_time)
                        self.last_card_sent_time = time.time()
                        current_card.sent = True
                    self.card_storage.appendleft(current_card)
                runLoopStatus = "Complete"
                time.sleep(3)
                self.forceSend = False
            except Exception, e:
                sleep_timer = 20
                print "Error during {0} of run loop: {1}\nWill try again in {2} seconds...".format(runLoopStatus, e, sleep_timer)
                traceback.print_exc()
                time.sleep(sleep_timer)

    def checkStatus(self):
        statusMap = {
            0: 'Stopped',
            1: 'Running',
            2: 'Pumped Off',
            3: 'Faulted',
            4: 'Starting',
            5: 'Recovering',
            100: 'Read Error',
            1000: 'PLC Error',
            9999: 'No Response'
        }
        status = statusMap[int(readTag(plc_ip, "Pump.Run_Status")[0])]

        if status:
            date = time.time()
            if statusCh.last_value != status:
                self.statusChanged = True
                print "Status has changed from {0} to {1} @ {2}".format(statusCh.last_value, status, time.time())
            else:
                self.statusChanged = False
                return False

            if self.statusChanged or self.forceSend:
                self.status = status
                self.sendtodb("status", status, date)
                statusCh.last_value = status
            return status
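    # checkStatus() translates the raw Pump.Run_Status integer through
    # statusMap before anything is uploaded; e.g. a PLC value of 2 maps to
    # 'Pumped Off' and 9999 to 'No Response'. Only a change from the last
    # mapped string (or a forced send) reaches sendtodb().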

    def poc_sync(self, name, value):
        self.sendtodb("connected", "true", 0)
        return True

    def poc_set_address(self, name, value):
        self.device_address = value
        return True

    def poc_refresh_data(self, name, value):
        self.forceSend = True
        return True

4318
POCloud/modbusMap.p
Normal file
254
POCloud/poc.py
Normal file
@@ -0,0 +1,254 @@
#!/usr/bin/python

import threading
import time
import pickle
import traceback

from pycomm.ab_comm.clx import Driver as ClxDriver
from collections import deque
from device_base import deviceBase

PLC_IP_ADDRESS = '192.168.1.10'


def read_tag(addr, tag):
    """Read a tag from the PLC."""
    plc = ClxDriver()
    if plc.open(addr):
        try:
            v = plc.read_tag(tag)
            # print(v)
            return v
        except Exception:
            err = plc.get_status()
            plc.close()
            print err
        plc.close()
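# read_tag() hands back whatever pycomm returns -- the rest of this module
# assumes a (value, data_type) tuple on success (hence the v[0] / cv[1]
# indexing) and None when the open or read fails. Hypothetical usage:
#
#   status = read_tag(PLC_IP_ADDRESS, 'Pump.Run_Status')
#   if status:
#       print status[0]   # the raw value
#       print status[1]   # the CIP data type, reused by write_tag() below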

def read_array(addr, tag, start, end):
    """Read an array from the PLC."""
    plc = ClxDriver()
    if plc.open(addr):
        arr_vals = []
        try:
            for i in range(start, end):
                tag_w_index = tag + "[{}]".format(i)
                v = plc.read_tag(tag_w_index)
                # print('{} - {}'.format(tag_w_index, v))
                arr_vals.append(round(v[0], 4))
                # print(v)
            if arr_vals:
                return arr_vals
            else:
                print("No length for {}".format(addr))
                return False
        except Exception:
            err = plc.get_status()
            plc.close()
            print err
        plc.close()

def write_tag(addr, tag, val):
    """Write a tag to the PLC."""
    plc = ClxDriver()
    if plc.open(addr):
        try:
            cv = plc.read_tag(tag)
            wt = plc.write_tag(tag, val, cv[1])
            return wt
        except Exception:
            err = plc.get_status()
            plc.close()
            print err
        plc.close()
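# write_tag() reads the tag first purely to discover its data type (cv[1])
# so callers never need to know it. A sketch, assuming the tag name exists
# on the controller (borrowed from the sibling driver above):
#
#   ok = write_tag(PLC_IP_ADDRESS, 'Gauge_Off_Command', 0)
#   # ok is pycomm's write result on success, None if open() failed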


class Card(object):
    """Card class definition."""

    def __init__(self, unified_time):
        """Initialize the Card."""
        self.sc = []
        self.dc = []
        self.sent = False
        self.read_time = unified_time
        # set the defaults before readCard() runs so a successful read
        # isn't clobbered afterwards
        self.card_id = 0
        self.num_points = 0
        self.readCard()

    def readCard(self):
        """Read the card data."""
        self.card_id = read_tag(PLC_IP_ADDRESS, "Card_Past[1].ID")[0]
        self.num_points = int(read_tag(PLC_IP_ADDRESS, "Card_Past[1].Num_Points")[0])
        print("reading {} points from card ID {}".format(self.num_points, self.card_id))

        if self.num_points > 1:
            # retry each array until the PLC returns a full read
            surf_pos = False
            while not surf_pos:
                surf_pos = read_array(PLC_IP_ADDRESS, 'Card_Past[1].Surface_Position', 1, self.num_points)

            surf_lod = False
            while not surf_lod:
                surf_lod = read_array(PLC_IP_ADDRESS, 'Card_Past[1].Surface_Load', 1, self.num_points)

            down_pos = False
            while not down_pos:
                down_pos = read_array(PLC_IP_ADDRESS, 'Card_Past[1].Downhole_Position', 1, self.num_points)

            down_lod = False
            while not down_lod:
                down_lod = read_array(PLC_IP_ADDRESS, 'Card_Past[1].Downhole_Load', 1, self.num_points)

            if surf_pos and surf_lod and down_pos and down_lod:
                for i in range(0, self.num_points-1):
                    if not (surf_pos[i] == 0.0) and not (surf_lod[i] == 0.0):
                        self.sc.append([surf_pos[i], surf_lod[i]])
                    if not (down_pos[i] == 0.0) and not (down_lod[i] == 0.0):
                        self.dc.append([down_pos[i], down_lod[i]])
                return True
            else:
                print("couldn't get a full set of position/load pairs")
                print("Here's what we got:")
                print("SURFACE POS")
                print(surf_pos)
                print("SURFACE LOAD")
                print(surf_lod)
                print("DOWNHOLE POS")
                print(down_pos)
                print("DOWNHOLE LOAD")
                print(down_lod)
                return False

    def stringify(self):
        ''' returns a list of two strings [surface card, downhole card]'''
        # sc_str = "["
        # dc_str = "["
        # for i in range(0, len(self.sc)):
        #     this_sc_pair = self.sc[i]
        #     try:
        #         sc_str = sc_str + "[{},{}],".format(this_sc_pair[0], this_sc_pair[1])
        #     except IndexError, e:
        #         print("IndexError: {}\nFor {}".format(e, this_sc_pair))

        # for j in range(0, len(self.dc)):
        #     dc_str = dc_str + "[{},{}],".format(self.dc[j][0], self.dc[j][1])
        # dc_str = dc_str + "[{},{}]]".format(self.dc[0][0], self.dc[0][1])
        sc_str = str(self.sc)
        dc_str = str(self.dc)
        return [sc_str, dc_str]
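    # This version simply delegates to str(): for self.sc ==
    # [[0.0, 5000.0], [10.0, 5200.0]] (hypothetical points) it returns
    # '[[0.0, 5000.0], [10.0, 5200.0]]'. Unlike the older formatter kept
    # above, it does not close the loop by repeating the first point, and
    # pairs are separated by ', ' rather than ','.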

class start(threading.Thread, deviceBase):

    def __init__(self, name=None, number=None, mac=None, Q=None, mcu=None, companyId=None, offset=None, mqtt=None, Nodes=None):
        threading.Thread.__init__(self)
        deviceBase.__init__(self, name=name, number=number, mac=mac, Q=Q, mcu=mcu, companyId=companyId, offset=offset, mqtt=mqtt, Nodes=Nodes)

        self.daemon = True
        self.version = "5"
        self.finished = threading.Event()
        threading.Thread.start(self)

        self.forceSend = False
        self.cardLoopTimer = 600
        self.statusChanged = False
        self.al_status_last = False
        self.dl_status_last = False
        self.card_storage_limit = 5
        self.last_card_sent_time = 0
        self.runLoopStatus = ""
        self.eventIds = []
        self.wellSetup = []

        # load stored event ID's
        try:
            with open('eventIds.p', 'rb') as handle:
                self.eventIds = pickle.load(handle)

            print "found pickled eventID dictionary: {0}".format(self.eventIds)
        except Exception:
            print "couldn't load event ID's from pickle"

        # load stored wellconfig's
        try:
            with open('wellSetup.p', 'rb') as handle:
                self.wellSetup = pickle.load(handle)
            print "Found pickled Well Setup (but it's going to be too long to print)"
        except Exception:
            print "couldn't load Well Setup from pickle"

    # this is a required function for all drivers; its goal is to upload some piece of data
    # about your device so it can be seen on the web
    def register(self):
        pass

    def run(self):
        wait_sec = 30
        for i in range(0, wait_sec):
            print("poc driver will start in {} seconds".format(wait_sec - i))
            time.sleep(1)

        card_storage = deque([])  # array of the last x cards
        while True:
            if self.forceSend:
                print("FORCE SEND: TRUE")
            try:
                self.statusChanged = False

                self.runLoopStatus = "Reading Cards"
                if len(card_storage) > 0:
                    if not read_tag(PLC_IP_ADDRESS, "Card_Past[1].ID")[0] == card_storage[0].card_id:
                        current_time = time.time()
                        current_card = Card(current_time)
                        self.sendtodbDevJSON(1, "card_history", current_card.card_id, current_time, 'poc')
                        if (current_card.read_time - self.last_card_sent_time) > self.cardLoopTimer or self.forceSend:
                            cards = current_card.stringify()
                            self.sendtodbDev(1, 'sc', cards[0], current_time, "poc")
                            self.sendtodbDev(1, 'dc', cards[1], current_time, "poc")
                            self.last_card_sent_time = time.time()
                            current_card.sent = True
                        card_storage.appendleft(current_card)
                        while len(card_storage) > self.card_storage_limit:
                            card_storage.pop()
                    if self.statusChanged:
                        for c in card_storage:
                            if not c.sent:
                                cstr = c.stringify()
                                self.sendtodbDev(1, 'sc', cstr[0], c.read_time, "poc")
                                self.sendtodbDev(1, 'dc', cstr[1], c.read_time, "poc")
                                self.last_card_sent_time = time.time()
                else:
                    current_time = time.time()
                    current_card = Card(current_time)
                    self.sendtodbDevJSON(1, "card_history", current_card.card_id, current_time, 'poc')
                    if (current_card.read_time - self.last_card_sent_time) > self.cardLoopTimer or self.forceSend:
                        cards = current_card.stringify()
                        self.sendtodbDev(1, 'sc', cards[0], current_time, "poc")
                        self.sendtodbDev(1, 'dc', cards[1], current_time, "poc")
                        self.last_card_sent_time = time.time()
                        current_card.sent = True
                    card_storage.appendleft(current_card)
                self.runLoopStatus = "Complete"
                time.sleep(3)
                self.forceSend = False
            except Exception, e:
                sleep_timer = 20
                print("Error during {0} of run loop: {1}\nWill try again in {2} seconds...".format(self.runLoopStatus, e, sleep_timer))
                traceback.print_exc()
                time.sleep(sleep_timer)

    def poc_sync(self, name, value):
        self.sendtodb(1, "connected", "true", 0, 'poc')
        return True

    def poc_refresh_data(self, name, value):
        self.forceSend = True
        return True

567
POCloud/poc_bak.py
Normal file
@@ -0,0 +1,567 @@
#!/usr/bin/python

import traceback
import threading
import time
import os
from device_base import deviceBase
from datetime import datetime
import requests
import json
import calendar
import pickle
from dateutil import tz
from requests.packages.urllib3.exceptions import InsecureRequestWarning
from requests.packages.urllib3.exceptions import InsecurePlatformWarning

requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
requests.packages.urllib3.disable_warnings(InsecurePlatformWarning)

API_HTTP_TYPE = "https"
API_DEVICE_ADDRESS = "192.168.1.30"
API_DEVICE_PORT = 5000

API_BASE_URL = "{}://{}:{}".format(API_HTTP_TYPE, API_DEVICE_ADDRESS, API_DEVICE_PORT)
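# With the defaults above this composes to "https://192.168.1.30:5000";
# every check* method below appends its own route and query string, e.g.
#
#   API_BASE_URL + '/api/today_totals'
#   # -> 'https://192.168.1.30:5000/api/today_totals'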

go_channels = {
    'spm_average': {'channel': 'go_average_spm', 'last_value_sent': None},
    'downhole_gross_stroke_average': {'channel': 'go_downhole_gross_stroke', 'last_value_sent': None},  # TODO: ADD
    'downhole_net_stroke_average': {'channel': 'go_downhole_net_stroke', 'last_value_sent': None},  # TODO: ADD
    'electricity_cost_total': {'channel': 'go_electricity_cost', 'last_value_sent': None},
    'fluid_level_average': {'channel': 'go_fluid_above_pump', 'last_value_sent': None},
    'inflow_rate_average': {'channel': 'go_inflow_rate', 'last_value_sent': None},
    'kWh_used_total': {'channel': 'go_kwh', 'last_value_sent': None},
    'kWh_regen_total': {'channel': 'go_kwh_regen', 'last_value_sent': None},
    'lifting_cost_average': {'channel': 'go_lifting_cost', 'last_value_sent': None},
    'peak_pr_load': {'channel': 'go_peak_load', 'last_value_sent': None},
    'min_pr_load': {'channel': 'go_min_load', 'last_value_sent': None},
    'percent_run': {'channel': 'go_percent_run', 'last_value_sent': None},
    'polished_rod_hp_average': {'channel': 'go_polished_rod_hp', 'last_value_sent': None},
    'pump_hp_average': {'channel': 'go_pump_hp', 'last_value_sent': None},  # TODO: ADD
    'production_total': {'channel': 'go_production_calculated', 'last_value_sent': None},
    'pump_intake_pressure_average': {'channel': 'go_pump_intake_pressure', 'last_value_sent': None},
    'surface_stroke_length_average': {'channel': 'go_surface_stroke_length', 'last_value_sent': None},  # TODO: ADD
    'tubing_movement_average': {'channel': "go_tubing_movement", 'last_value_sent': None},  # TODO: ADD
}

tag_channels = {
    'Polished Rod HP': {'channel': 'polished_rod_hp', 'last_value_sent': None},
    'Peak Downhole Load': {'channel': 'downhole_peak_load', 'last_value_sent': None},  # TODO: ADD
    'Gross Stroke Length': {'channel': 'downhole_gross_stroke', 'last_value_sent': None},
    'Stroke Speed': {'channel': 'SPM', 'last_value_sent': None},
    'Tubing Head Pressure': {'channel': 'tubing_head_pressure', 'last_value_sent': None},
    'Minimum Polished Rod Load': {'channel': 'surface_min_load', 'last_value_sent': None},
    'Fluid Load': {'channel': 'downhole_fluid_load', 'last_value_sent': None},
    'Downhole Max. Position': {'channel': 'downhole_max_position', 'last_value_sent': None},
    'Downhole Net Stroke': {'channel': 'downhole_net_stroke', 'last_value_sent': None},
    'Pump Fill Percent': {'channel': 'fillage_percent', 'last_value_sent': None},
    'Downhole Pump HP': {'channel': 'pump_hp', 'last_value_sent': None},
    'Surface Min. Position': {'channel': 'surface_min_position', 'last_value_sent': None},  # TODO: ADD
    'Pump Intake Pressure': {'channel': 'pump_intake_pressure', 'last_value_sent': None},
    'Surface Max. Position': {'channel': 'surface_max_position', 'last_value_sent': None},  # TODO: ADD
    'Tubing Movement': {'channel': 'tubing_movement', 'last_value_sent': None},
    'Downhole Min. Position': {'channel': 'downhole_min_position', 'last_value_sent': None},
    'Peak Polished Rod Load': {'channel': 'surface_max_load', 'last_value_sent': None},
    'Minimum Downhole Load': {'channel': 'downhole_min_load', 'last_value_sent': None},  # TODO: ADD
    'Surface Stroke Length': {'channel': 'surface_stroke_length', 'last_value_sent': None},
    'Downhole Adjusted Gross Stroke': {'channel': 'downhole_adjusted_gross_stroke', 'last_value_sent': None},
    'Fluid Level': {'channel': 'fluid_above_pump', 'last_value_sent': None},
    'Stroke Production': {'channel': 'stroke_production', 'last_value_sent': None}
}

dt_channels = {  # Current Daily Totals
    # keys must match the 'name' field reported by the /api/today_totals
    # route, so the 'Pump Intake Presure' spelling is kept as-is
    'Electricity Cost': {'channel': 'dt_electricity_cost', 'last_value_sent': None, 'change_threshold': 1.5, 'last_send_ts': 0},
    'Inflow Rate': {'channel': 'dt_inflow_rate', 'last_value_sent': None, 'change_threshold': 1.5, 'last_send_ts': 0},
    'Energy Regen': {'channel': 'dt_kWh_regen', 'last_value_sent': None, 'change_threshold': 1.5, 'last_send_ts': 0},
    'Min Load': {'channel': 'dt_min_load', 'last_value_sent': None, 'change_threshold': 1.5, 'last_send_ts': 0},
    'Polished Rod HP': {'channel': 'dt_polished_rod_hp', 'last_value_sent': None, 'change_threshold': 1.5, 'last_send_ts': 0},
    'Calculated Production': {'channel': 'dt_calculated_production', 'last_value_sent': None, 'change_threshold': 1.5, 'last_send_ts': 0},
    'Projected Production': {'channel': 'dt_projected_production', 'last_value_sent': None, 'change_threshold': 1.5, 'last_send_ts': 0},
    'Pump HP': {'channel': 'dt_pump_hp', 'last_value_sent': None, 'change_threshold': 1.5, 'last_send_ts': 0},
    'Pump Intake Presure': {'channel': 'dt_pump_intake_pressure', 'last_value_sent': None, 'change_threshold': 1.5, 'last_send_ts': 0},
    'Surface Stroke Length': {'channel': 'dt_surface_stroke_length', 'last_value_sent': None, 'change_threshold': 1.5, 'last_send_ts': 0},
    'Tubing Movement': {'channel': 'dt_tubing_movement', 'last_value_sent': None, 'change_threshold': 1.5, 'last_send_ts': 0},
    'Downhole Net Stroke': {'channel': 'dt_downhole_net_stroke', 'last_value_sent': None, 'change_threshold': 1.5, 'last_send_ts': 0},
    'Average SPM': {'channel': 'dt_average_spm', 'last_value_sent': None, 'change_threshold': 1.5, 'last_send_ts': 0},
    'Peak Load': {'channel': 'dt_peak_load', 'last_value_sent': None, 'change_threshold': 1.5, 'last_send_ts': 0},
    'kWh': {'channel': 'dt_kWh', 'last_value_sent': None, 'change_threshold': 1.5, 'last_send_ts': 0},
    'Percent Run': {'channel': 'dt_percent_run', 'last_value_sent': None, 'change_threshold': 1.5, 'last_send_ts': 0},
    'Fluid Level': {'channel': 'dt_fluid_level', 'last_value_sent': None, 'change_threshold': 1.5, 'last_send_ts': 0},
    'Lifting Cost': {'channel': 'dt_lifting_cost', 'last_value_sent': None, 'change_threshold': 1.5, 'last_send_ts': 0},
    'Full Card Production': {'channel': 'dt_full_card_production', 'last_value_sent': None, 'change_threshold': 1.5, 'last_send_ts': 0},
}


class start(threading.Thread, deviceBase):

    def __init__(self, name=None, number=None, mac=None, Q=None, mcu=None, companyId=None, offset=None, mqtt=None, Nodes=None):
        threading.Thread.__init__(self)
        deviceBase.__init__(self, name=name, number=number, mac=mac, Q=Q, mcu=mcu, companyId=companyId, offset=offset, mqtt=mqtt, Nodes=Nodes)

        self.daemon = True
        self.forceSend = True
        self.version = "3"
        self.device_address = "http://192.168.1.30"
        # self.device_address = "http://localhost"
        self.cardLoopTimer = 600
        self.finished = threading.Event()
        threading.Thread.start(self)
        self.status_changed = False

        self.last_card_send_time = 0
        self.al_status_last = False
        self.dl_status_last = False

        # load stored Run Status ID's
        try:
            with open('runstatusIds.p', 'rb') as handle:
                self.runstatusIds = pickle.load(handle)

            print "found pickled Run Status ID dictionary: {0}".format(self.runstatusIds)
        except:
            print "couldn't load Run Status ID's from pickle"
            self.runstatusIds = []

        # load stored event ID's
        try:
            with open('eventIds.p', 'rb') as handle:
                self.eventIds = pickle.load(handle)

            print "found pickled eventID dictionary: {0}".format(self.eventIds)
        except:
            print "couldn't load event ID's from pickle"
            self.eventIds = []

        # load stored Well Test ID's
        try:
            with open('welltestIDs.p', 'rb') as handle:
                self.welltestIDs = pickle.load(handle)

            print "found pickled welltestIDs dictionary: {0}".format(self.welltestIDs)
        except:
            print "couldn't load well test ID's from pickle"
            self.welltestIDs = []

        # load stored Gauge Off ID's
        try:
            with open('gaugeoffIds.p', 'rb') as handle:
                self.gaugeoffIds = pickle.load(handle)

            print "found pickled gaugeoffIds dictionary: {0}".format(self.gaugeoffIds)
        except:
            print "couldn't load gauge off ID's from pickle"
            self.gaugeoffIds = []

        # load stored Fluid Shot ID's
        try:
            with open('fluidshotIds.p', 'rb') as handle:
                self.fluidshotIds = pickle.load(handle)

            print "found pickled fluidshotIDs dictionary: {0}".format(self.fluidshotIds)
        except:
            print "couldn't load fluid shot ID's from pickle"
            self.fluidshotIds = []

        # load stored note ID's
        try:
            with open('noteIDs.p', 'rb') as handle:
                self.noteIDs = pickle.load(handle)

            print "found pickled noteID dictionary: {0}".format(self.noteIDs)
        except:
            print "couldn't load note ID's from pickle"
            self.noteIDs = []

        # load stored last_card_id
        try:
            with open('last_card_id.p', 'rb') as handle:
                self.last_card_id = pickle.load(handle)

            print "found pickled last_card_id: {0}".format(self.last_card_id)
        except:
            print "couldn't load last_card_id from pickle"
            self.last_card_id = 0

    # this is a required function for all drivers; its goal is to upload some piece of data
    # about your device so it can be seen on the web
    def register(self):
        # this backup module has no module-level `channels` dict, so there is
        # no channel metadata to seed here (the original indexed a dict that
        # does not exist in this file)
        pass

    def run(self):
        self.last_status = ""
        runLoopStatus = "Startup"
        while True:
            try:
                if self.forceSend:
                    print("!!!!!!!!!!!!!!! FORCE SEND !!!!!!!!!!!!!!!")

                runLoopStatus = "checkStatus"
                chk_status = self.checkStatus(self.last_status)
                if chk_status:
                    self.last_status = chk_status
                    self.status_changed = True

                runLoopStatus = "checkEvents"
                self.checkEvents()

                runLoopStatus = "checkNotes"
                self.checkNotes()

                runLoopStatus = "checkWellTests"
                self.checkWellTests()

                runLoopStatus = "checkFluidShots"
                self.checkFluidShots()

                runLoopStatus = "checkDailyTotals"
                self.checkDailyTotals()

                runLoopStatus = "checkGaugeOffData"
                self.checkGaugeOffData()

                runLoopStatus = "checkStoredValues"
                self.checkStoredValues()

                # runLoopStatus = "getDataLoggerStatus()"
                # self.getDataLoggerStatus()

                if self.status_changed:
                    runLoopStatus = "getLatestXCards"
                    self.forceSend = True
                    self.checkLatestCard(numCards=5)
                else:
                    runLoopStatus = "checkLatestCard"
                    self.checkLatestCard()

                # if self.forceSend or (checkBackupSkipped > checkBackupEvery):
                #     runLoopStatus = "checkBackup"
                #     self.checkBackup()
                #     checkBackupSkipped = 0
                # checkBackupSkipped = checkBackupSkipped + 1

                runLoopStatus = "Complete"
                time.sleep(10)
                self.forceSend = False
            except Exception, e:
                sleep_timer = 20
                print "Error during {0} of run loop: {1}\nWill try again in {2} seconds...".format(runLoopStatus, e, sleep_timer)
                traceback.print_exc()
                time.sleep(sleep_timer)

    def checkStatus(self, last_status):
        global API_BASE_URL
        try:
            url = API_BASE_URL + '/api/run_status_log?q={"order_by":[{"field":"created_on","direction":"desc"}]}'
            api_req = requests.get(url, verify=False)
            if api_req.status_code == 200:
                req_data = json.loads(api_req.text)
                req_data['objects'].reverse()
                for i in range(0, len(req_data['objects'])):
                    if int(req_data['objects'][i]['_id']) not in self.runstatusIds:
                        new_status = req_data['objects'][i]["run_status"]
                        timestamp = calendar.timegm(datetime.strptime(req_data['objects'][i]['created_on'], '%Y-%m-%dT%H:%M:%S.%f').timetuple())
                        self.sendtodb('status', new_status, timestamp)
                        self.runstatusIds.append(int(req_data['objects'][i]['_id']))
                        if len(self.runstatusIds) > 20:
                            del self.runstatusIds[0]
                        with open('runstatusIds.p', 'wb') as handle:
                            pickle.dump(self.runstatusIds, handle)

                if req_data['objects'][-1:][0]['run_status'] != last_status:
                    print "Status has changed from {0} to {1} @ {2}".format(last_status, req_data['objects'][-1:][0]['run_status'], req_data['objects'][-1:][0]['created_on'])
                    return req_data['objects'][-1:][0]['run_status']
        except Exception as e:
            print "Error during checkStatus..."
            print("++++++== TRACEBACK ==++++++")
            traceback.print_exc()
            return False
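    # The parsing above implies a Flask-Restless-style envelope (an
    # assumption based on the ?q= query syntax); roughly, a hypothetical
    # payload looks like:
    #
    #   {"objects": [
    #       {"_id": "42", "run_status": "Running",
    #        "created_on": "2016-11-23T00:37:02.806026"},
    #       ...
    #   ]}
    #
    # reverse() flips the newest-first ordering so unseen IDs are uploaded
    # oldest-first.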

    def checkEvents(self):
        global API_BASE_URL
        try:
            url = API_BASE_URL + '/api/events?q={"order_by":[{"field":"created_on","direction":"desc"}]}'
            api_req = requests.get(url, verify=False)
            if api_req.status_code == 200:
                req_data = json.loads(api_req.text)
                req_data['objects'].reverse()
                for i in range(0, len(req_data['objects'])):
                    if int(req_data['objects'][i]['_id']) not in self.eventIds:
                        timestamp = calendar.timegm(datetime.strptime(req_data['objects'][i]['created_on'], '%Y-%m-%dT%H:%M:%S.%f').timetuple())
                        self.sendtodbJSON('events', json.dumps(req_data['objects'][i]), timestamp)
                        self.eventIds.append(int(req_data['objects'][i]['_id']))
                        if len(self.eventIds) > 20:
                            del self.eventIds[0]
                        with open('eventIds.p', 'wb') as handle:
                            pickle.dump(self.eventIds, handle)
            return True
        except Exception as e:
            print "Error during checkEvents..."
            print("++++++== TRACEBACK ==++++++")
            traceback.print_exc()
            return False

    def checkNotes(self):
        global API_BASE_URL
        try:
            url = API_BASE_URL + '/api/notes?q={"order_by":[{"field":"created_on","direction":"desc"}]}'
            api_req = requests.get(url, verify=False)
            if api_req.status_code == 200:
                req_data = json.loads(api_req.text)
                req_data['objects'].reverse()
                for i in range(0, len(req_data['objects'])):
                    if int(req_data['objects'][i]['_id']) not in self.noteIDs:
                        timestamp = calendar.timegm(datetime.strptime(req_data['objects'][i]['created_on'], '%Y-%m-%dT%H:%M:%S').timetuple())
                        self.sendtodbJSON('notes', json.dumps(req_data['objects'][i]), timestamp)
                        self.noteIDs.append(int(req_data['objects'][i]['_id']))
                        if len(self.noteIDs) > 20:
                            del self.noteIDs[0]
                        with open('noteIDs.p', 'wb') as handle:
                            pickle.dump(self.noteIDs, handle)
            return True
        except Exception as e:
            print "Error during checkNotes..."
            print("++++++== TRACEBACK ==++++++")
            traceback.print_exc()
            return False

    def checkFluidShots(self):
        global API_BASE_URL
        try:
            url = API_BASE_URL + '/api/fluid_shots?q={"order_by":[{"field":"created_on","direction":"desc"}]}'
            api_req = requests.get(url, verify=False)
            if api_req.status_code == 200:
                req_data = json.loads(api_req.text)
                req_data['objects'].reverse()
                for i in range(0, len(req_data['objects'])):
                    if int(req_data['objects'][i]['_id']) not in self.fluidshotIds:
                        timestamp = calendar.timegm(datetime.strptime(req_data['objects'][i]['created_on'], '%Y-%m-%dT%H:%M:%S').timetuple())
                        self.sendtodbJSON('fluidshots', json.dumps(req_data['objects'][i]), timestamp)
                        self.fluidshotIds.append(int(req_data['objects'][i]['_id']))
                        if len(self.fluidshotIds) > 20:
                            del self.fluidshotIds[0]
                        with open('fluidshotIds.p', 'wb') as handle:
                            pickle.dump(self.fluidshotIds, handle)
            return True
        except Exception as e:
            print "Error during checkFluidShots..."
            print("++++++== TRACEBACK ==++++++")
            traceback.print_exc()
            return False

    def checkWellTests(self):
        global API_BASE_URL
        try:
            url = API_BASE_URL + '/api/well_test?q={"order_by":[{"field":"created_on","direction":"desc"}]}'
            api_req = requests.get(url, verify=False)
            if api_req.status_code == 200:
                req_data = json.loads(api_req.text)
                req_data['objects'].reverse()
                for i in range(0, len(req_data['objects'])):
                    if int(req_data['objects'][i]['_id']) not in self.welltestIDs:
                        timestamp = calendar.timegm(datetime.strptime(req_data['objects'][i]['created_on'], '%Y-%m-%dT%H:%M:%S').timetuple())
                        self.sendtodbJSON('welltests', json.dumps(req_data['objects'][i]), timestamp)
                        self.welltestIDs.append(int(req_data['objects'][i]['_id']))
                        if len(self.welltestIDs) > 20:
                            del self.welltestIDs[0]
                        with open('welltestIDs.p', 'wb') as handle:
                            pickle.dump(self.welltestIDs, handle)
            return True
        except Exception as e:
            print "Error during checkWellTests..."
            print("++++++== TRACEBACK ==++++++")
            traceback.print_exc()
            return False

    def checkDailyTotals(self):
        global API_BASE_URL, dt_channels
        try:
            api_req = requests.get("{}/api/today_totals".format(API_BASE_URL), verify=False)
            if api_req.status_code == 200:
                req_data = json.loads(api_req.text)
                for i in range(0, len(req_data)):
                    if req_data[i]['name'] in dt_channels:
                        if dt_channels[req_data[i]['name']]['last_value_sent'] is None:
                            self.sendtodb(dt_channels[req_data[i]['name']]['channel'], req_data[i]['value'], 0)
                            dt_channels[req_data[i]['name']]['last_value_sent'] = req_data[i]['value']
                            dt_channels[req_data[i]['name']]['last_send_ts'] = time.time()
                        elif abs(dt_channels[req_data[i]['name']]['last_value_sent'] - req_data[i]['value']) > dt_channels[req_data[i]['name']]['change_threshold']:
                            self.sendtodb(dt_channels[req_data[i]['name']]['channel'], req_data[i]['value'], 0)
                            dt_channels[req_data[i]['name']]['last_value_sent'] = req_data[i]['value']
                            dt_channels[req_data[i]['name']]['last_send_ts'] = time.time()
                        elif time.time() - dt_channels[req_data[i]['name']]['last_send_ts'] > 3600:  # Send values every hour
                            self.sendtodb(dt_channels[req_data[i]['name']]['channel'], req_data[i]['value'], 0)
                            dt_channels[req_data[i]['name']]['last_value_sent'] = req_data[i]['value']
                            dt_channels[req_data[i]['name']]['last_send_ts'] = time.time()
        except Exception as e:
            print "Error during checkDailyTotals..."
            print("++++++== TRACEBACK ==++++++")
            traceback.print_exc()
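    # A daily total is re-sent in exactly three cases: first value seen,
    # drift beyond change_threshold (1.5 for every entry above), or an hour
    # since the last send. Hypothetical samples against a last sent value
    # of 10.0:
    #
    #   new = 11.4   # |11.4 - 10.0| = 1.4 <= 1.5 -> suppressed (until 1 h)
    #   new = 11.6   # |11.6 - 10.0| = 1.6 >  1.5 -> sendtodb(...)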

    def checkGaugeOffData(self):
        global API_BASE_URL, go_channels
        try:
            url = API_BASE_URL + '/api/gauge_off?q={"order_by":[{"field":"created_on","direction":"desc"}]}'
            api_req = requests.get(url, verify=False)
            if api_req.status_code == 200:
                req_data = json.loads(api_req.text)
                req_data['objects'].reverse()
                for i in range(0, len(req_data['objects'])):
                    if int(req_data['objects'][i]['_id']) not in self.gaugeoffIds:
                        timestamp = calendar.timegm(datetime.strptime(req_data['objects'][i]['created_on'], '%Y-%m-%dT%H:%M:%S.%f').timetuple())
                        for col_name in req_data['objects'][i]:
                            if col_name in go_channels:
                                self.sendtodb(go_channels[col_name]['channel'], req_data['objects'][i][col_name], timestamp)

                        self.gaugeoffIds.append(int(req_data['objects'][i]['_id']))
                        if len(self.gaugeoffIds) > 20:
                            del self.gaugeoffIds[0]
                        with open('gaugeoffIds.p', 'wb') as handle:
                            pickle.dump(self.gaugeoffIds, handle)
            return True
        except Exception as e:
            print "Error during checkGaugeOffData..."
            print("++++++== TRACEBACK ==++++++")
            traceback.print_exc()
            return False

    def checkStoredValues(self):
        global API_BASE_URL, tag_channels
        try:
            api_req = requests.get('{}/api/latest'.format(API_BASE_URL), verify=False)
            if api_req.status_code == 200:
                req_data = json.loads(api_req.text)
                for i in range(0, len(req_data)):
                    if req_data[i]['tag_name'] in tag_channels:
                        if tag_channels[req_data[i]['tag_name']]['last_value_sent'] is None:
                            self.sendtodb(tag_channels[req_data[i]['tag_name']]['channel'], req_data[i]['value'], calendar.timegm(datetime.strptime(req_data[i]['datetime'], '%Y-%m-%d %H:%M:%S.%f').timetuple()))
                            tag_channels[req_data[i]['tag_name']]['last_value_sent'] = req_data[i]['value']
                        elif req_data[i]['value'] != tag_channels[req_data[i]['tag_name']]['last_value_sent']:
                            self.sendtodb(tag_channels[req_data[i]['tag_name']]['channel'], req_data[i]['value'], calendar.timegm(datetime.strptime(req_data[i]['datetime'], '%Y-%m-%d %H:%M:%S.%f').timetuple()))
                            tag_channels[req_data[i]['tag_name']]['last_value_sent'] = req_data[i]['value']
        except Exception as e:
            print "Error during checkStoredValues..."
            print("++++++== TRACEBACK ==++++++")
            traceback.print_exc()

    def checkLatestCard(self, numCards=1):
        global API_BASE_URL
        try:
            url = API_BASE_URL + '/api/cards?q={"order_by":[{"field":"created_on","direction":"desc"}], "limit":' + str(numCards) + "}"
            api_req = requests.get(url, verify=False)
            req_data = json.loads(api_req.text)['objects']

            utc_tz = tz.tzutc()
            local_tz = tz.tzlocal()

            # check the card to see if it's new
            # 1. if it's new, send the folder/file_name to the card_history channel
            # 2. if it's new and it's been 10 minutes since you last sent an entire card, then send up all of the data
            for i in range(0, len(req_data)):
                current_card = req_data[i]
                if current_card['_id'] > self.last_card_id:
                    # 2016-11-23T00:37:02.806026
                    dt = datetime.strptime(current_card['created_on'], '%Y-%m-%dT%H:%M:%S.%f')
                    dt_utc = dt.replace(tzinfo=utc_tz)
                    dt_local = dt_utc.astimezone(tz.tzlocal())

                    timestamp_utc = calendar.timegm(dt_utc.timetuple())
                    timestamp_local = calendar.timegm(dt_local.timetuple())

                    print "New card detected @ {0}".format(datetime.strftime(dt_local, "%Y-%m-%dT%H:%M:%S.%f"))
                    # set the last value = to current value and upload your data
                    self.sendtodb("card_history", current_card['_id'], timestamp_utc)
                    self.last_card_id = current_card['_id']
                    with open('last_card_id.p', 'wb') as handle:
                        pickle.dump(self.last_card_id, handle)

                    # check the last time the card was updated
                    if (time.time() - self.last_card_send_time) > self.cardLoopTimer or self.status_changed or self.forceSend:
                        # it's been 10 minutes, send the full upload
                        print "Either status has changed or last stored card is too old."
                        self.sendtodb("cardtype", current_card['stroke_type'], int(timestamp_utc))

                        # TODO: FIX CARD PARSING
                        s_p = current_card["surf_pos"].replace("[", "").replace("]", "").split(", ")
                        s_l = current_card["surf_lod"].replace("[", "").replace("]", "").split(", ")
                        d_p = current_card["down_pos"].replace("[", "").replace("]", "").split(", ")
                        d_l = current_card["down_lod"].replace("[", "").replace("]", "").split(", ")
                        newSc = "["
                        newDc = "["

                        for i in range(len(s_p)):
                            try:
                                if not s_p[i]:  # skip empty fields left by split()
                                    continue
                                # compare as floats; the split() fields are strings
                                if float(s_p[i]) != 0.0 and float(s_l[i]) != 0.0:
                                    newSc += "[" + str(round(float(s_p[i]), 3)) + "," + str(round(float(s_l[i]), 3)) + "],"
                            except:
                                pass
                        newSc = newSc[:-1] + "]"

                        for i in range(len(d_p)):
                            try:
                                if not d_p[i]:
                                    continue
                                if float(d_p[i]) != 0.0 and float(d_l[i]) != 0.0:
                                    newDc += "[" + str(round(float(d_p[i]), 3)) + "," + str(round(float(d_l[i]), 3)) + "],"
                            except:
                                pass
                        newDc = newDc[:-1] + "]"

                        self.sendtodb("surf_pos", current_card["surf_pos"], timestamp_utc)
                        self.sendtodb("surf_lod", current_card["surf_lod"], timestamp_utc)
                        self.sendtodb("down_pos", current_card["down_pos"], timestamp_utc)
                        self.sendtodb("down_lod", current_card["down_lod"], timestamp_utc)
                        self.sendtodb("sc", newSc, timestamp_utc)
                        self.sendtodb("dc", newDc, timestamp_utc)

        except Exception as e:
            print "Error during checkLatestCard..."
            print("++++++== TRACEBACK ==++++++")
            traceback.print_exc()
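    # The card fields arrive as stringified lists. For hypothetical inputs
    # surf_pos = "[1.0, 12.5]" and surf_lod = "[5000.0, 5200.0]", the loop
    # above strips the brackets, splits on ", ", pairs position with load,
    # rounds to 3 places, and drops any pair containing a zero, yielding:
    #
    #   newSc == "[[1.0,5000.0],[12.5,5200.0]]"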

    # def getDataLoggerStatus(self):
    #     try:
    #         data = json.loads(requests.get(self.device_address + "/json/pythonstatus/", verify=False).text)
    #         al_status = "Not OK"
    #         if data['status']['alarmLogger']:
    #             al_status = "OK"
    #
    #         if al_status != self.al_status_last:
    #             self.sendtodb("alarmlogger_status", al_status, 0)
    #             self.al_status_last = al_status
    #
    #         dl_status = "Not OK"
    #         if data['status']['dataLogger']:
    #             dl_status = "OK"
    #         if al_status != self.dl_status_last:
    #             self.sendtodb("datalogger_status", dl_status, 0)
    #             self.dl_status_last = dl_status
    #     except Exception, e:
    #         print("getDataLoggerStatus Error: {}".format(e))

    def poc_get_card(self, name, value):
        self.getcard(value)

    def poc_sync(self, name, value):
        self.sendtodb("connected", "true", 0)
        return True

    def poc_set_address(self, name, value):
        self.device_address = value
        return True

    def poc_refresh_data(self, name, value):
        self.forceSend = True
        return True

467
POCloud/w_csv/poc.py
Normal file
@@ -0,0 +1,467 @@
#!/usr/bin/python

import types
import traceback
import binascii
import threading
import time
import thread
import os
import struct
import sys
import serial
import minimalmodbus
import pickle
import re
from device_base import deviceBase
from datetime import datetime

import requests
try:
    import json
except:
    import simplejson as json
import calendar


def min_max_check(val, min, max):
    if val < min:
        return min
    elif val > max:
        return max
    else:
        return val
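# Plain clamp helper. Hypothetical usage:
#
#   min_max_check(120.5, 0, 100)   # -> 100
#   min_max_check(-3, 0, 100)      # -> 0
#   min_max_check(42, 0, 100)      # -> 42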

go_channels = {
    "percent_run": {"meshifyName": "go_percent_run", "last_value": "", "last_send_time": 0, "data_type": "float", "change_amount": 0},
    "kWh": {"meshifyName": "go_kwh", "last_value": "", "last_send_time": 0, "data_type": "float", "change_amount": 0},
    "electricity_cost": {"meshifyName": "go_electricity_cost", "last_value": "", "last_send_time": 0, "data_type": "float", "change_amount": 0},
    "peak_load": {"meshifyName": "go_peak_load", "last_value": "", "last_send_time": 0, "data_type": "float", "change_amount": 0},
    "min_load": {"meshifyName": "go_min_load", "last_value": "", "last_send_time": 0, "data_type": "float", "change_amount": 0},
    "average_SPM": {"meshifyName": "go_average_spm", "last_value": "", "last_send_time": 0, "data_type": "float", "change_amount": 0},
    "production_calculated": {"meshifyName": "go_production_calculated", "last_value": "", "last_send_time": 0, "data_type": "float", "change_amount": 0},
    "full_card_production": {"meshifyName": "go_full_card_production", "last_value": "", "last_send_time": 0, "data_type": "float", "change_amount": 0},
    "polished_rod_HP": {"meshifyName": "go_polished_rod_hp", "last_value": "", "last_send_time": 0, "data_type": "float", "change_amount": 0},
    "lifting_cost": {"meshifyName": "go_lifting_cost", "last_value": "", "last_send_time": 0, "data_type": "float", "change_amount": 0},
    "fluid_above_pump": {"meshifyName": "go_fluid_above_pump", "last_value": "", "last_send_time": 0, "data_type": "float", "change_amount": 0},
    "pump_intake_pressure": {"meshifyName": "go_pump_intake_pressure", "last_value": "", "last_send_time": 0, "data_type": "float", "change_amount": 0},
    "kWh_regen": {"meshifyName": "go_kwh_regen", "last_value": "", "last_send_time": 0, "data_type": "float", "change_amount": 0},
    "inflow_rate": {"meshifyName": "go_inflow_rate", "last_value": "", "last_send_time": 0, "data_type": "float", "change_amount": 0},
}

stroke_data_min_upload_time = 300  # seconds

channels = {
    "status": {"last_value": "", "data_type": "str", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": 0},
    "card_history": {"last_value": "", "data_type": "str", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": 0},
    "well_name": {"last_value": "", "data_type": "str", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": stroke_data_min_upload_time},
    "tubing_head_pressure": {"last_value": "", "data_type": "float", "change_amount": 5, "last_time_uploaded": 0, "min_time_between_uploads": stroke_data_min_upload_time},
    "fluid_gradient": {"last_value": "", "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": stroke_data_min_upload_time},
    "stuffing_box_friction": {"last_value": "", "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": stroke_data_min_upload_time},
    "dt": {"last_value": "", "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": stroke_data_min_upload_time},
    "downhole_gross_stroke": {"last_value": "", "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": stroke_data_min_upload_time},
    "downhole_adjusted_gross_stroke": {"last_value": "", "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": stroke_data_min_upload_time},
    "downhole_net_stroke": {"last_value": "", "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": stroke_data_min_upload_time},
    "downhole_fluid_load": {"last_value": "", "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": stroke_data_min_upload_time},
    "surface_max_load": {"last_value": "", "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": stroke_data_min_upload_time},
    "surface_min_load": {"last_value": "", "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": stroke_data_min_upload_time},
    "tubing_movement": {"last_value": "", "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": stroke_data_min_upload_time},
    "surface_stroke_length": {"last_value": "", "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": stroke_data_min_upload_time},
    "fillage_percent": {"last_value": "", "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": stroke_data_min_upload_time},
    "polished_rod_hp": {"last_value": "", "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": stroke_data_min_upload_time},
    "pump_hp": {"last_value": "", "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": stroke_data_min_upload_time},
    "SPM": {"last_value": "", "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": stroke_data_min_upload_time},
    "fluid_above_pump": {"last_value": "", "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": stroke_data_min_upload_time},
    "pump_intake_pressure": {"last_value": "", "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": stroke_data_min_upload_time},
    "stroke_production": {"last_value": "", "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": stroke_data_min_upload_time},
    "drive_torque_mode": {"last_value": "", "data_type": "int", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": stroke_data_min_upload_time},
    "torque_reference": {"last_value": "", "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": stroke_data_min_upload_time},
    "speed_reference": {"last_value": "", "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": stroke_data_min_upload_time},
    "downhole_min_position": {"last_value": "", "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": stroke_data_min_upload_time},
    "downhole_max_position": {"last_value": "", "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": stroke_data_min_upload_time},
}

total_min_upload_time = 300  # seconds

dt_channels = {  # Current Daily Totals
    "Average_SPM": {"meshify_channel": "dt_average_spm", "last_value": 0, "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": total_min_upload_time},
    "Downhole_Net_Stroke": {"meshify_channel": "dt_downhole_net_stroke", "last_value": 0, "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": total_min_upload_time},
    "Electricity_Cost": {"meshify_channel": "dt_electricity_cost", "last_value": 0, "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": total_min_upload_time},
    "Fluid_Level": {"meshify_channel": "dt_fluid_level", "last_value": 0, "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": total_min_upload_time},
    "Full_Card_Production": {"meshify_channel": "dt_full_card_production", "last_value": 0, "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": total_min_upload_time},
    "Inflow_Rate": {"meshify_channel": "dt_inflow_rate", "last_value": 0, "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": total_min_upload_time},
    "kWh": {"meshify_channel": "dt_kWh", "last_value": 0, "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": total_min_upload_time},
    "kWh_Regen": {"meshify_channel": "dt_kWh_regen", "last_value": 0, "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": total_min_upload_time},
    "Lifting_Cost": {"meshify_channel": "dt_lifting_cost", "last_value": 0, "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": total_min_upload_time},
    "Peak_Load": {"meshify_channel": "dt_peak_load", "last_value": 0, "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": total_min_upload_time},
    "Min_Load": {"meshify_channel": "dt_min_load", "last_value": 0, "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": total_min_upload_time},
    "Percent_Run": {"meshify_channel": "dt_percent_run", "last_value": 0, "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": total_min_upload_time},
    "Polished_Rod_HP": {"meshify_channel": "dt_polished_rod_hp", "last_value": 0, "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": total_min_upload_time},
    "Calculated_Production": {"meshify_channel": "dt_calculated_production", "last_value": 0, "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": total_min_upload_time},
    "Projected_Production": {"meshify_channel": "dt_projected_production", "last_value": 0, "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": total_min_upload_time},
    "Pump_HP": {"meshify_channel": "dt_pump_hp", "last_value": 0, "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": total_min_upload_time},
    "Pump_Intake_Presure": {"meshify_channel": "dt_pump_intake_pressure", "last_value": 0, "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": total_min_upload_time},
    "Surface_Stroke_Length": {"meshify_channel": "dt_surface_stroke_length", "last_value": 0, "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": total_min_upload_time},
    "Tubing_Movement": {"meshify_channel": "dt_tubing_movement", "last_value": 0, "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": total_min_upload_time},
}
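# Daily-total keys map onto Meshify channels with a "dt_" prefix. NOTE: the
# misspelled "Pump_Intake_Presure" key is presumably left that way to match
# the key name the device itself reports, so it must not be "corrected" here.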

class start(threading.Thread, deviceBase):

    def __init__(self, name=None, number=None, mac=None, Q=None, mcu=None, companyId=None, offset=None, mqtt=None, Nodes=None):
        threading.Thread.__init__(self)
        deviceBase.__init__(self, name=name, number=number, mac=mac, Q=Q, mcu=mcu, companyId=companyId, offset=offset, mqtt=mqtt, Nodes=Nodes)

        self.daemon = True
        self.forceSend = True
        self.version = "2"
        self.device_address = "http://192.168.1.30/"
        self.cardLoopTimer = 600
        self.finished = threading.Event()
        threading.Thread.start(self)
        self.statusChanged = False
        self.al_status_last = False
        self.dl_status_last = False

        # load stored event IDs
        try:
            with open('eventIds.p', 'rb') as handle:
                self.eventIds = pickle.load(handle)

            print "found pickled eventID dictionary: {0}".format(self.eventIds)
        except:
            print "couldn't load event IDs from pickle"
            self.eventIds = []

        # load stored well configs
        try:
            with open('wellSetup.p', 'rb') as handle:
                self.wellSetup = pickle.load(handle)

            print "Found pickled Well Setup (but it's going to be too long to print)"
            # print self.wellConfig
        except:
            print "couldn't load Well Setup from pickle"
            self.wellSetup = []

        self.sendtodbJSON("device_address", self.device_address, 0)

    # this is a required function for all drivers; its goal is to upload some piece of data
    # about your device so it can be seen on the web
    def register(self):
        channels["status"]["last_value"] = ""

    def run(self):
        self.runLoopStatus = ""
        checkBackupEvery = 100
        checkBackupSkipped = 1
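        # with the 3-second sleep at the bottom of the loop, checking the backup
        # every 100 passes works out to roughly every 5 minutes (plus whenever
        # forceSend is set)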
        while True:
            try:
                runLoopStatus = "checkEvents"
                self.checkEvents()

                runLoopStatus = "checkStatus"
                self.checkStatus()

                runLoopStatus = "checkDailyTotals"
                self.checkDailyTotals()

                runLoopStatus = "checkGaugeOffData"
                self.checkGaugeOffData()

                runLoopStatus = "getDataLoggerStatus()"
                self.getDataLoggerStatus()

                if self.statusChanged:
                    runLoopStatus = "getLatestXCards"
                    self.getLatestXCards(5)
                else:
                    runLoopStatus = "checkLatestCard"
                    self.checkLatestCard()

                if self.forceSend or (checkBackupSkipped > checkBackupEvery):
                    runLoopStatus = "checkBackup"
                    self.checkBackup()
                    checkBackupSkipped = 0
                checkBackupSkipped = checkBackupSkipped + 1

                runLoopStatus = "Complete"
                time.sleep(3)
                self.forceSend = False
            except Exception, e:
                sleep_timer = 20
                print "Error during {0} of run loop: {1}\nWill try again in {2} seconds...".format(runLoopStatus, e, sleep_timer)
                time.sleep(sleep_timer)
    def checkBackup(self):
        backupList = json.loads(requests.get(self.device_address + "/json/backups").text)
        file = backupList["backups"][0]
        data = json.loads(requests.get(self.device_address + "/json/backups/" + file).text)
        timestamp = time.time()
        if data != self.wellSetup or self.forceSend:
            self.sendtodbJSON("well_setup", json.dumps(data), timestamp)
            self.wellSetup = data
            with open('wellSetup.p', 'wb') as handle:
                pickle.dump(self.wellSetup, handle)
    def checkEvents(self):
        data = json.loads(requests.get(self.device_address + "/json/event_list").text)
        events = data["events"]
        for event in events:
            if int(event["id"]) not in self.eventIds:
                # timestamp = calendar.timegm(time.strptime(event["datetime"], '%Y-%m-%dT%H:%M:%S.%fZ'))
                date = event['datetime']
                reg = "(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2}).(\d*)Z"
                fd = re.search(reg, date)
                dt = datetime(int(fd.group(1)), int(fd.group(2)), int(fd.group(3)), int(fd.group(4)), int(fd.group(5)), int(fd.group(6)), int(fd.group(7)))
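                # note: group(7) feeds the raw fractional digits straight into
                # datetime's microsecond field, but timetuple() below drops
                # sub-second precision anyway, so the epoch value is whole seconds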
                # timestamp = int(time.mktime(time.strptime(date, '%Y-%m-%dT%H:%M:%S.%fZ')))
                timestamp = calendar.timegm(dt.timetuple())

                # we have a new event
                self.sendtodbJSON("events", json.dumps(event), timestamp)
                self.eventIds.append(int(event["id"]))
                if len(self.eventIds) > 50:
                    del self.eventIds[0]
                with open('eventIds.p', 'wb') as handle:
                    pickle.dump(self.eventIds, handle)
    def checkStatus(self):
        statusMap = {
            0: 'Stopped',
            1: 'Running',
            2: 'Pumped Off',
            3: 'Faulted',
            4: 'Starting',
            5: 'Recovering',
            100: 'Read Error',
            1000: 'PLC Error',
            9999: 'No Response'
        }
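        # numeric run-status codes reported by the device's /json/status
        # endpoint, mapped to the display strings stored on the platform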
        st_response = requests.get(self.device_address + "/json/status")
        if st_response.status_code == 200:
            data = json.loads(st_response.text)
            date = data["ISOdate"]
            status = statusMap[int(data["status"])]

            if channels["status"]["last_value"] != status:
                self.statusChanged = True
                print "Status has changed from {0} to {1} @ {2}".format(channels["status"]["last_value"], status, time.time())
            else:
                self.statusChanged = False

            if self.statusChanged or self.forceSend:
                self.status = status
                # timestamp = int(time.mktime(time.strptime(date, '%Y-%m-%dT%H:%M:%S.%fZ')))
                reg = "(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2}).(\d*)Z"
                fd = re.search(reg, date)
                dt = datetime(int(fd.group(1)), int(fd.group(2)), int(fd.group(3)), int(fd.group(4)), int(fd.group(5)), int(fd.group(6)), int(fd.group(7)))
                timestamp = calendar.timegm(dt.timetuple())

                self.sendtodb("status", status, timestamp)
                channels["status"]["last_value"] = status
                self.checkLatestCard()
    def checkDailyTotals(self):
        data = json.loads(requests.get(self.device_address + "/json/totals").text)
        total = data["totals"]
        if total['status'] == "success":
            timestamp = 0
            for val in total['values']:
                if dt_channels.has_key(val['name']):
                    if ((time.time() - int(dt_channels[val['name']]['last_time_uploaded'])) > int(dt_channels[val['name']]['min_time_between_uploads'])):
                        if (float(val['value']) >= (float(dt_channels[val['name']]["last_value"]) + float(dt_channels[val['name']]["change_amount"]))) or (float(val['value']) <= (float(dt_channels[val['name']]["last_value"]) - float(dt_channels[val['name']]["change_amount"]))):
                            print("[dailyTotal] {0}: {1}".format(val['name'], val['value']))
                            self.sendtodb(dt_channels[val['name']]["meshify_channel"], float(val['value']), timestamp)
                            dt_channels[val['name']]["last_value"] = float(val['value'])
                            dt_channels[val['name']]["last_time_uploaded"] = time.time()
        else:
            # total is a plain dict; attribute access (total.message) raised AttributeError
            print("checkDailyTotalsError: {0}".format(total.get('message')))
    def checkGaugeOffData(self):
        data = json.loads(requests.get(self.device_address + "/json/history").text)
        day = data["hist"]
        date = day['gauge_date']
        # print day["gauge_date"]
        # timestamp = time.mktime(time.strptime(day["gauge_date"], '%Y-%m-%dT%H:%M:%S.%fZ'))
        reg = "(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2}).(\d*)Z"
        fd = re.search(reg, date)
        dt = datetime(int(fd.group(1)), int(fd.group(2)), int(fd.group(3)), int(fd.group(4)), int(fd.group(5)), int(fd.group(6)), int(fd.group(7)))
        # timestamp = int(time.mktime(time.strptime(date, '%Y-%m-%dT%H:%M:%S.%fZ')))
        timestamp = calendar.timegm(dt.timetuple())

        for entry in day:
            if go_channels.has_key(entry):
                # "percent_run": {"meshifyName": "go_percent_run", "last_value": "", "last_send_time": 0, "data_type": "float", "change_amount": 0},
                if go_channels[entry]["last_value"] != day[entry]:
                    print entry, day[entry]
                    print go_channels[entry]["meshifyName"], day[entry], timestamp
                    self.sendtodb(go_channels[entry]["meshifyName"], day[entry], timestamp)
                    go_channels[entry]["last_value"] = day[entry]
    def checkLatestCard(self):
        latest = requests.get(self.device_address + "/json/latest")
        latest = json.loads(latest.text)
        folder = str(latest["folder"])
        file = latest["file"].replace(".csv", "")

        # check the card to see if it's new
        # 1. if it's new, send the folder/file_name to the card_history channel
        # 2. if it's new and it's been 10 minutes since you last sent an entire card, then send up all of the data

        if channels["card_history"]["last_value"] != (folder + "/" + file):
            # we have a new card
            # get the data for this event
            data = json.loads(requests.get(self.device_address + "/json/" + folder + "/" + file).text)
            dateTime = str(data["contents"]["utctime"])

            # timestamp = time.mktime(time.strptime(dateTime, '%Y-%m-%d %H:%M:%S.%f'))

            reg = "(\d{4})-(\d{2})-(\d{2}) (\d{2}):(\d{2}):(\d{2}).(\d*)"
            fd = re.search(reg, dateTime)
            dt = datetime(int(fd.group(1)), int(fd.group(2)), int(fd.group(3)), int(fd.group(4)), int(fd.group(5)), int(fd.group(6)), int(fd.group(7)))
            timestamp = calendar.timegm(dt.timetuple())
            card_timestamp = int(time.mktime(dt.timetuple()))
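            # timestamp treats the card time as UTC (timegm); card_timestamp runs
            # the same struct_time through mktime, i.e. interprets it in the box's
            # local timezone, so the two differ by the local UTC offset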

            print "New card detected @ {0}".format(datetime.strftime(datetime.fromtimestamp(timestamp), "%Y-%m-%d %H:%M:%S.%f"))
            # set the last value equal to the current value and upload your data
            channels["card_history"]["last_value"] = (folder + "/" + file)
            self.sendtodb("card_history", (folder + "/" + file), card_timestamp)

            # check the last time the card was updated
            if (time.time() - int(channels["card_history"]["last_time_uploaded"])) > self.cardLoopTimer or self.statusChanged or self.forceSend:
                # it's been 10 minutes, send the full upload
                print "Either status has changed or last stored card is too old."
                channels["card_history"]["last_time_uploaded"] = time.time()
                self.process_card(data, timestamp, card_timestamp, sendCards=True)
                return
            else:
                self.process_card(data, timestamp, card_timestamp, sendCards=False)
    def process_card(self, data, data_timestamp, card_timestamp, sendCards=False):
        # if sendCards = True then we upload all data no matter what, including cards

        # check what type of data it is
        # check if it's changed, and if it has, how long it has been since it changed
        # NOTE: the initial value of "" is given to all channels in the channels object,
        # so to avoid comparing a string to a float, and to make sure on startup we send
        # all of the values, the first time through we send everything that has "" as its last value

        # We don't want to store any data on starting, just the cards
        if self.status != 'Starting':
            for channel in data["contents"]:
                if channels.has_key(channel):
                    if channels[channel]["data_type"] == "str":
                        if (data["contents"][channel] != channels[channel]["last_value"] and ((time.time() - int(channels[channel]["last_time_uploaded"])) > int(channels[channel]["min_time_between_uploads"]))) or sendCards:
                            print "new value for: ", channel
                            print data["contents"][channel]
                            self.sendtodb(channel, str(data["contents"][channel]), int(data_timestamp))
                            channels[channel]["last_value"] = data["contents"][channel]
                            channels[channel]["last_time_uploaded"] = time.time()
                    if channels[channel]["data_type"] == "float" or channels[channel]["data_type"] == "int":
                        if channels[channel]["last_value"] == "":
                            # print "first time getting data"
                            print "new value for: ", channel
                            print data["contents"][channel]
                            self.sendtodb(channel, str(data["contents"][channel]), int(data_timestamp))
                            channels[channel]["last_value"] = data["contents"][channel]
                            channels[channel]["last_time_uploaded"] = time.time()
                        if (abs(float(data["contents"][channel]) - float(channels[channel]["last_value"])) > channels[channel]["change_amount"] and ((time.time() - int(channels[channel]["last_time_uploaded"])) > int(channels[channel]["min_time_between_uploads"]))) or sendCards:
                            print "new value for: ", channel
                            print data["contents"][channel]
                            self.sendtodb(channel, str(data["contents"][channel]), int(data_timestamp))
                            channels[channel]["last_value"] = data["contents"][channel]
                            channels[channel]["last_time_uploaded"] = time.time()
        if sendCards:
            # round every card point to 3 decimal places before serializing
            for i in range(len(data["d"])):
                try:
                    for x in range(len(data["d"][i])):
                        data["d"][i][x] = float('%.3f' % data["d"][i][x])
                except Exception, e:
                    print e
            for i in range(len(data["s"])):
                try:
                    for x in range(len(data["s"][i])):
                        data["s"][i][x] = float('%.3f' % data["s"][i][x])
                except Exception, e:
                    print e

            sc = data["s"]
            dc = data["d"]
            newSc = "["
            for i in sc:
                try:
                    if i[0] is None:
                        continue
                    if i[0] != 0.0 and i[1] != 0.0:
                        newSc += "[" + str(i[0]) + "," + str(i[1]) + "],"
                except:
                    pass
            newSc += "[" + str(sc[0][0]) + "," + str(sc[0][1]) + "]"
            newSc += "]"

            newDc = "["
            for i in dc:
                try:
                    if i[0] is None:
                        continue
                    if i[0] != 0.0 and i[1] != 0.0:
                        newDc += "[" + str(i[0]) + "," + str(i[1]) + "],"
                except:
                    pass
            newDc += "[" + str(dc[0][0]) + "," + str(dc[0][1]) + "]"
            newDc += "]"

            self.sendtodb("sc", newSc, card_timestamp)
            self.sendtodb("dc", newDc, card_timestamp)
    def getLatestXCards(self, numCards):
        data = json.loads(requests.get(self.device_address + "/json/latest/" + str(int(numCards))).text)
        for card in data['cards']:
            card_data = json.loads(requests.get(self.device_address + "/json/" + data['folder'] + "/" + card).text)
            dateTime = str(card_data["contents"]["utctime"])
            # timestamp = time.mktime(time.strptime(dateTime, '%Y-%m-%d %H:%M:%S.%f'))

            reg = "(\d{4})-(\d{2})-(\d{2}) (\d{2}):(\d{2}):(\d{2}).(\d*)"
            fd = re.search(reg, dateTime)
            dt = datetime(int(fd.group(1)), int(fd.group(2)), int(fd.group(3)), int(fd.group(4)), int(fd.group(5)), int(fd.group(6)), int(fd.group(7)))
            timestamp = calendar.timegm(dt.timetuple())
            card_timestamp = int(time.mktime(dt.timetuple()))

            channels["card_history"]["last_value"] = (data['folder'] + "/" + card)
            self.sendtodb("card_history", (data['folder'] + "/" + card), card_timestamp)
            self.process_card(card_data, timestamp, card_timestamp, sendCards=True)
    def getDataLoggerStatus(self):
        data = json.loads(requests.get(self.device_address + "/json/pythonstatus/").text)
        al_status = "Not OK"
        if data['status']['alarmLogger']:
            al_status = "OK"

        if al_status != self.al_status_last:
            self.sendtodb("alarmlogger_status", al_status, 0)
            self.al_status_last = al_status

        dl_status = "Not OK"
        if data['status']['dataLogger']:
            dl_status = "OK"
        if dl_status != self.dl_status_last:  # was comparing al_status: copy-paste bug
            self.sendtodb("datalogger_status", dl_status, 0)
            self.dl_status_last = dl_status
    def poc_get_card(self, name, value):
        self.getcard(value)

    def poc_sync(self, name, value):
        self.sendtodb("connected", "true", 0)
        return True

    def poc_set_address(self, name, value):
        self.device_address = value
        return True

    def poc_refresh_data(self, name, value):
        self.forceSend = True
        return True
446
POCloud/w_mysql/poc.py
Normal file
@@ -0,0 +1,446 @@
#!/usr/bin/python

import types
import traceback
import binascii
import threading
import time
import thread
import os
import struct
import sys
import serial
import minimalmodbus
import pickle
import re
from device_base import deviceBase
from datetime import datetime

import requests
try:
    import json
except:
    import simplejson as json
import calendar

def min_max_check(val, min, max):
    if val < min:
        return min
    elif val > max:
        return max
    else:
        return val
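# clamps val into [min, max]; e.g. min_max_check(120, 0, 100) returns 100
# (note the parameter names shadow the min()/max() builtins inside this function)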

go_channels = {
    "percent_run": {"meshifyName": "go_percent_run", "last_value": "", "last_send_time": 0, "data_type": "float", "change_amount": 0},
    "kWh": {"meshifyName": "go_kwh", "last_value": "", "last_send_time": 0, "data_type": "float", "change_amount": 0},
    "electricity_cost": {"meshifyName": "go_electricity_cost", "last_value": "", "last_send_time": 0, "data_type": "float", "change_amount": 0},
    "peak_load": {"meshifyName": "go_peak_load", "last_value": "", "last_send_time": 0, "data_type": "float", "change_amount": 0},
    "min_load": {"meshifyName": "go_min_load", "last_value": "", "last_send_time": 0, "data_type": "float", "change_amount": 0},
    "average_SPM": {"meshifyName": "go_average_spm", "last_value": "", "last_send_time": 0, "data_type": "float", "change_amount": 0},
    "production_calculated": {"meshifyName": "go_production_calculated", "last_value": "", "last_send_time": 0, "data_type": "float", "change_amount": 0},
    "full_card_production": {"meshifyName": "go_full_card_production", "last_value": "", "last_send_time": 0, "data_type": "float", "change_amount": 0},
    "polished_rod_HP": {"meshifyName": "go_polished_rod_hp", "last_value": "", "last_send_time": 0, "data_type": "float", "change_amount": 0},
    "lifting_cost": {"meshifyName": "go_lifting_cost", "last_value": "", "last_send_time": 0, "data_type": "float", "change_amount": 0},
    "fluid_above_pump": {"meshifyName": "go_fluid_above_pump", "last_value": "", "last_send_time": 0, "data_type": "float", "change_amount": 0},
    "pump_intake_pressure": {"meshifyName": "go_pump_intake_pressure", "last_value": "", "last_send_time": 0, "data_type": "float", "change_amount": 0},
    "kWh_regen": {"meshifyName": "go_kwh_regen", "last_value": "", "last_send_time": 0, "data_type": "float", "change_amount": 0},
    "inflow_rate": {"meshifyName": "go_inflow_rate", "last_value": "", "last_send_time": 0, "data_type": "float", "change_amount": 0},
}

stroke_data_min_upload_time = 300  # seconds

channels = {
    "status": {"last_value": "", "data_type": "str", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": 0},
    "card_history": {"last_value": "", "data_type": "str", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": 0},
    "well_name": {"last_value": "", "data_type": "str", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": stroke_data_min_upload_time},
    "tubing_head_pressure": {"last_value": "", "data_type": "float", "change_amount": 5, "last_time_uploaded": 0, "min_time_between_uploads": stroke_data_min_upload_time},
    "fluid_gradient": {"last_value": "", "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": stroke_data_min_upload_time},
    "stuffing_box_friction": {"last_value": "", "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": stroke_data_min_upload_time},
    "dt": {"last_value": "", "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": stroke_data_min_upload_time},
    "downhole_gross_stroke": {"last_value": "", "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": stroke_data_min_upload_time},
    "downhole_adjusted_gross_stroke": {"last_value": "", "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": stroke_data_min_upload_time},
    "downhole_net_stroke": {"last_value": "", "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": stroke_data_min_upload_time},
    "downhole_fluid_load": {"last_value": "", "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": stroke_data_min_upload_time},
    "surface_max_load": {"last_value": "", "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": stroke_data_min_upload_time},
    "surface_min_load": {"last_value": "", "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": stroke_data_min_upload_time},
    "tubing_movement": {"last_value": "", "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": stroke_data_min_upload_time},
    "surface_stroke_length": {"last_value": "", "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": stroke_data_min_upload_time},
    "fillage_percent": {"last_value": "", "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": stroke_data_min_upload_time},
    "polished_rod_hp": {"last_value": "", "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": stroke_data_min_upload_time},
    "pump_hp": {"last_value": "", "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": stroke_data_min_upload_time},
    "SPM": {"last_value": "", "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": stroke_data_min_upload_time},
    "fluid_above_pump": {"last_value": "", "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": stroke_data_min_upload_time},
    "pump_intake_pressure": {"last_value": "", "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": stroke_data_min_upload_time},
    "stroke_production": {"last_value": "", "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": stroke_data_min_upload_time},
    "drive_torque_mode": {"last_value": "", "data_type": "int", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": stroke_data_min_upload_time},
    "torque_reference": {"last_value": "", "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": stroke_data_min_upload_time},
    "speed_reference": {"last_value": "", "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": stroke_data_min_upload_time},
    "downhole_min_position": {"last_value": "", "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": stroke_data_min_upload_time},
    "downhole_max_position": {"last_value": "", "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": stroke_data_min_upload_time},
}

total_min_upload_time = 300  # seconds

dt_channels = {  # Current Daily Totals
    "Average_SPM": {"meshify_channel": "dt_average_spm", "last_value": 0, "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": total_min_upload_time},
    "Downhole_Net_Stroke": {"meshify_channel": "dt_downhole_net_stroke", "last_value": 0, "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": total_min_upload_time},
    "Electricity_Cost": {"meshify_channel": "dt_electricity_cost", "last_value": 0, "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": total_min_upload_time},
    "Fluid_Level": {"meshify_channel": "dt_fluid_level", "last_value": 0, "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": total_min_upload_time},
    "Full_Card_Production": {"meshify_channel": "dt_full_card_production", "last_value": 0, "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": total_min_upload_time},
    "Inflow_Rate": {"meshify_channel": "dt_inflow_rate", "last_value": 0, "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": total_min_upload_time},
    "kWh": {"meshify_channel": "dt_kWh", "last_value": 0, "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": total_min_upload_time},
    "kWh_Regen": {"meshify_channel": "dt_kWh_regen", "last_value": 0, "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": total_min_upload_time},
    "Lifting_Cost": {"meshify_channel": "dt_lifting_cost", "last_value": 0, "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": total_min_upload_time},
    "Peak_Load": {"meshify_channel": "dt_peak_load", "last_value": 0, "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": total_min_upload_time},
    "Min_Load": {"meshify_channel": "dt_min_load", "last_value": 0, "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": total_min_upload_time},
    "Percent_Run": {"meshify_channel": "dt_percent_run", "last_value": 0, "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": total_min_upload_time},
    "Polished_Rod_HP": {"meshify_channel": "dt_polished_rod_hp", "last_value": 0, "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": total_min_upload_time},
    "Calculated_Production": {"meshify_channel": "dt_calculated_production", "last_value": 0, "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": total_min_upload_time},
    "Projected_Production": {"meshify_channel": "dt_projected_production", "last_value": 0, "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": total_min_upload_time},
    "Pump_HP": {"meshify_channel": "dt_pump_hp", "last_value": 0, "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": total_min_upload_time},
    "Pump_Intake_Presure": {"meshify_channel": "dt_pump_intake_pressure", "last_value": 0, "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": total_min_upload_time},
    "Surface_Stroke_Length": {"meshify_channel": "dt_surface_stroke_length", "last_value": 0, "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": total_min_upload_time},
    "Tubing_Movement": {"meshify_channel": "dt_tubing_movement", "last_value": 0, "data_type": "float", "change_amount": 0, "last_time_uploaded": 0, "min_time_between_uploads": total_min_upload_time},
}

class start(threading.Thread, deviceBase):

    def __init__(self, name=None, number=None, mac=None, Q=None, mcu=None, companyId=None, offset=None, mqtt=None, Nodes=None):
        threading.Thread.__init__(self)
        deviceBase.__init__(self, name=name, number=number, mac=mac, Q=Q, mcu=mcu, companyId=companyId, offset=offset, mqtt=mqtt, Nodes=Nodes)

        self.daemon = True
        self.forceSend = True
        self.version = "2"
        # self.device_address = "http://192.168.1.30/"
        self.device_address = "http://localhost/"
        self.cardLoopTimer = 600
        self.finished = threading.Event()
        threading.Thread.start(self)
        self.statusChanged = False
        self.al_status_last = False
        self.dl_status_last = False

        # load stored event IDs
        try:
            with open('eventIds.p', 'rb') as handle:
                self.eventIds = pickle.load(handle)

            print "found pickled eventID dictionary: {0}".format(self.eventIds)
        except:
            print "couldn't load event IDs from pickle"
            self.eventIds = []

        # load stored well configs
        try:
            with open('wellSetup.p', 'rb') as handle:
                self.wellSetup = pickle.load(handle)

            print "Found pickled Well Setup (but it's going to be too long to print)"
            # print self.wellConfig
        except:
            print "couldn't load Well Setup from pickle"
            self.wellSetup = []

        self.sendtodbJSON("device_address", self.device_address, 0)

    # this is a required function for all drivers; its goal is to upload some piece of data
    # about your device so it can be seen on the web
    def register(self):
        channels["status"]["last_value"] = ""

    def run(self):
        self.runLoopStatus = ""
        checkBackupEvery = 100
        checkBackupSkipped = 1
        while True:
            try:
                runLoopStatus = "checkEvents"
                self.checkEvents()

                runLoopStatus = "checkStatus"
                self.checkStatus()

                runLoopStatus = "checkDailyTotals"
                self.checkDailyTotals()

                runLoopStatus = "checkGaugeOffData"
                self.checkGaugeOffData()

                runLoopStatus = "getDataLoggerStatus()"
                self.getDataLoggerStatus()

                if self.statusChanged:
                    runLoopStatus = "getLatestXCards"
                    self.getLatestXCards(5)
                else:
                    runLoopStatus = "checkLatestCard"
                    self.checkLatestCard()

                if self.forceSend or (checkBackupSkipped > checkBackupEvery):
                    runLoopStatus = "checkBackup"
                    self.checkBackup()
                    checkBackupSkipped = 0
                checkBackupSkipped = checkBackupSkipped + 1

                runLoopStatus = "Complete"
                time.sleep(3)
                self.forceSend = False
            except Exception, e:
                sleep_timer = 20
                print "Error during {0} of run loop: {1}\nWill try again in {2} seconds...".format(runLoopStatus, e, sleep_timer)
                time.sleep(sleep_timer)
    def checkBackup(self):
        backupList = json.loads(requests.get(self.device_address + "/json/backups").text)
        file = backupList["backups"][0]
        data = json.loads(requests.get(self.device_address + "/json/backups/" + file).text)
        timestamp = time.time()
        if data != self.wellSetup or self.forceSend:
            self.sendtodbJSON("well_setup", json.dumps(data), timestamp)
            self.wellSetup = data
            with open('wellSetup.p', 'wb') as handle:
                pickle.dump(self.wellSetup, handle)
    def checkEvents(self):
        data = json.loads(requests.get(self.device_address + "/json/event_list").text)
        events = data["events"]
        for event in events:
            if int(event["id"]) not in self.eventIds:
                timestamp = calendar.timegm(time.strptime(event["datetime"], '%Y-%m-%dT%H:%M:%S.%fZ'))
                # we have a new event
                self.sendtodbJSON("events", json.dumps(event), timestamp)
                self.eventIds.append(int(event["id"]))
                if len(self.eventIds) > 50:
                    del self.eventIds[0]
                with open('eventIds.p', 'wb') as handle:
                    pickle.dump(self.eventIds, handle)
    def checkStatus(self):
        statusMap = {
            0: 'Stopped',
            1: 'Running',
            2: 'Pumped Off',
            3: 'Faulted',
            4: 'Starting',
            5: 'Recovering',
            100: 'Read Error',
            1000: 'PLC Error',
            9999: 'No Response'
        }
        st_response = requests.get(self.device_address + "/json/status")
        if st_response.status_code == 200:
            data = json.loads(st_response.text)
            date = data["ISOdate"]
            status = statusMap[int(data["status"])]

            if channels["status"]["last_value"] != status:
                self.statusChanged = True
                print "Status has changed from {0} to {1} @ {2}".format(channels["status"]["last_value"], status, time.time())
            else:
                self.statusChanged = False

            if self.statusChanged or self.forceSend:
                self.status = status
                reg = "(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2}).(\d{3})Z"
                fd = re.search(reg, date)
                dt = datetime(int(fd.group(1)), int(fd.group(2)), int(fd.group(3)), int(fd.group(4)), int(fd.group(5)), int(fd.group(6)), int(fd.group(7)))
                # timestamp = int(time.mktime(time.strptime(date, '%Y-%m-%dT%H:%M:%S.%fZ')))
                timestamp = calendar.timegm(dt.timetuple())
                self.sendtodb("status", status, timestamp)
                channels["status"]["last_value"] = status
    def checkDailyTotals(self):
        data = json.loads(requests.get(self.device_address + "/json/totals").text)
        total = data["totals"]
        if total['status'] == "success":
            timestamp = 0
            for val in total['values']:
                if dt_channels.has_key(val['name']):
                    if ((time.time() - int(dt_channels[val['name']]['last_time_uploaded'])) > int(dt_channels[val['name']]['min_time_between_uploads'])):
                        if (float(val['value']) >= (float(dt_channels[val['name']]["last_value"]) + float(dt_channels[val['name']]["change_amount"]))) or (float(val['value']) <= (float(dt_channels[val['name']]["last_value"]) - float(dt_channels[val['name']]["change_amount"]))):
                            print("[dailyTotal] {0}: {1}".format(val['name'], val['value']))
                            self.sendtodb(dt_channels[val['name']]["meshify_channel"], float(val['value']), timestamp)
                            dt_channels[val['name']]["last_value"] = float(val['value'])
                            dt_channels[val['name']]["last_time_uploaded"] = time.time()
        else:
            # total is a plain dict; attribute access (total.message) raised AttributeError
            print("checkDailyTotalsError: {0}".format(total.get('message')))
    def checkGaugeOffData(self):
        data = json.loads(requests.get(self.device_address + "/json/history").text)
        day = data["hist"]
        date = day['gauge_date']
        # print day["gauge_date"]
        # timestamp = time.mktime(time.strptime(day["gauge_date"], '%Y-%m-%dT%H:%M:%S.%fZ'))

        reg = "(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2}).(\d{3})Z"
        fd = re.search(reg, date)
        dt = datetime(int(fd.group(1)), int(fd.group(2)), int(fd.group(3)), int(fd.group(4)), int(fd.group(5)), int(fd.group(6)), int(fd.group(7)))
        # timestamp = int(time.mktime(time.strptime(date, '%Y-%m-%dT%H:%M:%S.%fZ')))
        timestamp = calendar.timegm(dt.timetuple())
        for entry in day:
            if go_channels.has_key(entry):
                # "percent_run": {"meshifyName": "go_percent_run", "last_value": "", "last_send_time": 0, "data_type": "float", "change_amount": 0},
                if go_channels[entry]["last_value"] != day[entry]:
                    print entry, day[entry]
                    print go_channels[entry]["meshifyName"], day[entry], timestamp
                    self.sendtodb(go_channels[entry]["meshifyName"], day[entry], timestamp)
                    go_channels[entry]["last_value"] = day[entry]
    def checkLatestCard(self):
        latest = requests.get(self.device_address + "/json/latest")
        latest = json.loads(latest.text)
        folder = str(latest["folder"])
        file = latest["file"].replace(".csv", "")

        # check the card to see if it's new
        # 1. if it's new, send the folder/file_name to the card_history channel
        # 2. if it's new and it's been 10 minutes since you last sent an entire card, then send up all of the data

        if channels["card_history"]["last_value"] != (folder + "/" + file):
            # we have a new card
            # get the data for this event
            data = json.loads(requests.get(self.device_address + "/json/" + folder + "/" + file).text)
            dateTime = str(data["card_data"]["Stroke_Time"])
            # timestamp = time.mktime(time.strptime(dateTime, '%Y-%m-%dT%H:%M:%S.%fZ'))

            reg = "(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2}).(\d{3})Z"
            fd = re.search(reg, dateTime)
            dt = datetime(int(fd.group(1)), int(fd.group(2)), int(fd.group(3)), int(fd.group(4)), int(fd.group(5)), int(fd.group(6)), int(fd.group(7)))
            timestamp = calendar.timegm(dt.timetuple())
            card_timestamp = int(time.mktime(dt.timetuple()))

            print "New card detected @ {0}".format(datetime.strftime(datetime.fromtimestamp(timestamp), "%Y-%m-%d %H:%M:%S.%f"))
            # set the last value equal to the current value and upload your data
            channels["card_history"]["last_value"] = (folder + "/" + file)
            self.sendtodb("card_history", (folder + "/" + file), timestamp)

            # check the last time the card was updated
            if (time.time() - int(channels["card_history"]["last_time_uploaded"])) > self.cardLoopTimer or self.statusChanged or self.forceSend:
                # it's been 10 minutes, send the full upload
                print "Either status has changed or last stored card is too old."
                channels["card_history"]["last_time_uploaded"] = time.time()
                self.process_card(data, timestamp, card_timestamp, sendCards=True)
                return
            else:
                self.process_card(data, timestamp, card_timestamp, sendCards=False)
    def process_card(self, data, data_timestamp, card_timestamp, sendCards=False):
        # if sendCards = True then we upload all data no matter what, including cards

        # check what type of data it is
        # check if it's changed, and if it has, how long it has been since it changed
        # NOTE: the initial value of "" is given to all channels in the channels object,
        # so to avoid comparing a string to a float, and to make sure on startup we send
        # all of the values, the first time through we send everything that has "" as its last value

        # We don't want to store any data on starting, just the cards
        if self.status != 'Starting':
            for channel in data["card_data"]:
                if channels.has_key(channel):
                    if channels[channel]["data_type"] == "str":
                        if (data["card_data"][channel] != channels[channel]["last_value"] and ((time.time() - int(channels[channel]["last_time_uploaded"])) > int(channels[channel]["min_time_between_uploads"]))) or sendCards:
                            print "new value for: ", channel
                            print data["card_data"][channel]
                            self.sendtodb(channel, str(data["card_data"][channel]), int(data_timestamp))
                            channels[channel]["last_value"] = data["card_data"][channel]
                            channels[channel]["last_time_uploaded"] = time.time()
                    if channels[channel]["data_type"] == "float" or channels[channel]["data_type"] == "int":
                        if channels[channel]["last_value"] == "":
                            # print "first time getting data"
                            print "new value for: ", channel
                            print data["card_data"][channel]
                            self.sendtodb(channel, str(data["card_data"][channel]), int(data_timestamp))
                            channels[channel]["last_value"] = data["card_data"][channel]
                            channels[channel]["last_time_uploaded"] = time.time()
                        if (abs(float(data["card_data"][channel]) - float(channels[channel]["last_value"])) > channels[channel]["change_amount"] and ((time.time() - int(channels[channel]["last_time_uploaded"])) > int(channels[channel]["min_time_between_uploads"]))) or sendCards:
                            print "new value for: ", channel
                            print data["card_data"][channel]
                            self.sendtodb(channel, str(data["card_data"][channel]), int(data_timestamp))
                            channels[channel]["last_value"] = data["card_data"][channel]
                            channels[channel]["last_time_uploaded"] = time.time()
        if sendCards:
            s_p = data["card_data"]["Surface_Position"]
            s_l = data["card_data"]["Surface_Load"]
            d_p = data["card_data"]["Downhole_Position"]
            d_l = data["card_data"]["Downhole_Load"]
            newSc = "["
            newDc = "["

            for i in range(len(s_p)):
                try:
                    if s_p[i] is None:
                        continue
                    if s_p[i] != 0.0 and s_l[i] != 0.0:
                        newSc += "[" + str(s_p[i]) + ", " + str(s_l[i]) + "], "
                except:
                    pass
            newSc += "[" + str(s_p[0]) + ", " + str(s_l[0]) + "]"
            newSc += "]"

            for i in range(len(d_p)):
                try:
                    if d_p[i] is None:
                        continue
                    if d_p[i] != 0.0 and d_l[i] != 0.0:
                        newDc += "[" + str(d_p[i]) + ", " + str(d_l[i]) + "], "
                except:
                    pass
            newDc += "[" + str(d_p[0]) + ", " + str(d_l[0]) + "]"
            newDc += "]"

            self.sendtodb("sc", newSc, card_timestamp)
            self.sendtodb("dc", newDc, card_timestamp)
    def getLatestXCards(self, numCards):
        data = json.loads(requests.get(self.device_address + "/json/latest/" + str(int(numCards))).text)
        for card in data['cards']:
            card_data = json.loads(requests.get(self.device_address + "/json/" + data['folder'] + "/" + card).text)
            dateTime = str(card_data["card_data"]["Stroke_Time"])
            # timestamp = time.mktime(time.strptime(dateTime, '%Y-%m-%dT%H:%M:%S.%fZ'))
            reg = "(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2}).(\d{3})Z"
            fd = re.search(reg, dateTime)
            dt = datetime(int(fd.group(1)), int(fd.group(2)), int(fd.group(3)), int(fd.group(4)), int(fd.group(5)), int(fd.group(6)), int(fd.group(7)))
            timestamp = calendar.timegm(dt.timetuple())
            card_timestamp = int(time.mktime(dt.timetuple()))

            channels["card_history"]["last_value"] = (data['folder'] + "/" + card)
            self.sendtodb("card_history", (data['folder'] + "/" + card), card_timestamp)
            # card_timestamp was missing from this call, which raised a TypeError
            self.process_card(card_data, timestamp, card_timestamp, sendCards=True)
    def getDataLoggerStatus(self):
        data = json.loads(requests.get(self.device_address + "/json/pythonstatus/").text)
        al_status = "Not OK"
        if data['status']['alarmLogger']:
            al_status = "OK"

        if al_status != self.al_status_last:
            self.sendtodb("alarmlogger_status", al_status, 0)
            self.al_status_last = al_status

        dl_status = "Not OK"
        if data['status']['dataLogger']:
            dl_status = "OK"
        if dl_status != self.dl_status_last:  # was comparing al_status: copy-paste bug
            self.sendtodb("datalogger_status", dl_status, 0)
            self.dl_status_last = dl_status
    def poc_get_card(self, name, value):
        self.getcard(value)

    def poc_sync(self, name, value):
        self.sendtodb("connected", "true", 0)
        return True

    def poc_set_address(self, name, value):
        self.device_address = value
        return True

    def poc_refresh_data(self, name, value):
        self.forceSend = True
        return True
567
POCloud/w_sqlite/poc.py
Normal file
@@ -0,0 +1,567 @@
#!/usr/bin/python

import types
import traceback
import binascii
import threading
import time
import thread
import os
import struct
import sys
import serial
import minimalmodbus
import pickle
import re
from device_base import deviceBase
from datetime import datetime

import requests
try:
    import json
except:
    import simplejson as json
import calendar

class Channel():
    def __init__(self, mesh_name, data_type, chg_threshold, guarantee_sec):
        self.mesh_name = mesh_name
        self.data_type = data_type
        self.last_value = None
        self.value = None
        self.last_send_time = 0
        self.chg_threshold = chg_threshold
        self.guarantee_sec = guarantee_sec

    def checkSend(self, newVal, force):
        v = self.data_type(newVal)
        if self.data_type == bool or self.data_type == str:
            if (self.last_send_time == 0) or (self.value is None) or not (self.value == v) or ((self.guarantee_sec > 0) and ((time.time() - self.last_send_time) > self.guarantee_sec)) or (force):
                self.last_value = self.value
                self.value = v
                self.last_send_time = time.time()
                return True
            else:
                return False
        else:
            if (self.last_send_time == 0) or (self.value is None) or (abs(self.value - v) > self.chg_threshold) or ((self.guarantee_sec > 0) and ((time.time() - self.last_send_time) > self.guarantee_sec)) or (force):
                self.last_value = self.value
                self.value = v
                self.last_send_time = time.time()
                return True
            else:
                return False
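# checkSend() is the single upload gatekeeper in this driver: it coerces the
# reading to the channel's type and answers True when the value is worth
# sending (first send, change beyond chg_threshold, guarantee_sec elapsed, or
# force). A minimal sketch of the intended call pattern, mirroring
# checkDailyTotals below:
#
#     spm = Channel('SPM', float, 0.5, 3600)
#     if spm.checkSend(reading, False):
#         self.sendtodb(spm.mesh_name, spm.value, 0)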
go_channels = {
    'electricity_cost': Channel('go_electricity_cost', float, 0.0, 0),
    'percent_run': Channel('go_percent_run', float, 0.0, 0),
    'average_SPM': Channel('go_average_spm', float, 0.0, 0),
    'peak_load': Channel('go_peak_load', float, 0.0, 0),
    'polished_rod_HP': Channel('go_polished_rod_hp', float, 0.0, 0),
    'lifting_cost': Channel('go_lifting_cost', float, 0.0, 0),
    'full_card_production': Channel('go_full_card_production', float, 0.0, 0),
    'fluid_above_pump': Channel('go_fluid_above_pump', float, 0.0, 0),
    'production_calculated': Channel('go_production_calculated', float, 0.0, 0),
    'kWh': Channel('go_kwh', float, 0.0, 3600),
    'inflow_rate': Channel('go_inflow_rate', float, 0.0, 0),
    'kWh_regen': Channel('go_kwh_regen', float, 0.0, 0),
    'pump_intake_pressure': Channel('go_pump_intake_pressure', float, 0.0, 0),
    'min_load': Channel('go_min_load', float, 0.0, 0),
}

card_channels = {
    "card_id": Channel("card_history", int, 0, 0),
    "card_type": Channel("cardtype", str, 0, 0),
    "card": Channel(None, str, 0, 600)
}
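# the "card" channel has no Meshify name of its own (mesh_name=None);
# guarantee_sec=600 means checkSend turns True at least every 10 minutes,
# so a full card goes up even when nothing has changed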

status = Channel('status', str, 0, 0)

tag_channels = {
    'polished_rod_hp': Channel('polished_rod_hp', float, 1.0, 3600),
    'drive_torque_mode': Channel('drive_torque_mode', int, 1.0, 3600),
    'downhole_gross_stroke': Channel('downhole_gross_stroke', float, 1.0, 3600),
    'fluid_gradient': Channel('fluid_gradient', float, 0.01, 3600),
    'tubing_head_pressure': Channel('tubing_head_pressure', float, 10.0, 3600),
    'surface_min_load': Channel('surface_min_load', float, 500.0, 3600),
    'downhole_fluid_load': Channel('downhole_fluid_load', float, 500.0, 3600),
    'downhole_max_position': Channel('downhole_max_position', float, 1.0, 3600),
    'downhole_net_stroke': Channel('downhole_net_stroke', float, 1.0, 3600),
    'fillage_percent': Channel('fillage_percent', float, 5.0, 3600),
    'pump_hp': Channel('pump_hp', float, 1.0, 3600),
    'spm': Channel('SPM', float, 0.5, 3600),
    'pump_intake_pressure': Channel('pump_intake_pressure', float, 200.0, 3600),
    'speed_reference': Channel('speed_reference', float, 50.0, 3600),
    'downhole_min_position': Channel('downhole_min_position', float, 1.0, 3600),
    'tubing_movement': Channel('tubing_movement', float, 1.0, 3600),
    'surface_max_load': Channel('surface_max_load', float, 500.0, 3600),
    'stuffing_box_friction': Channel('stuffing_box_friction', float, 50.0, 3600),
    'dt': Channel('dt', float, 0.001, 3600),
    'fluid_level': Channel('fluid_above_pump', float, 100.0, 3600),
    'torque_reference': Channel('torque_reference', float, 5.0, 3600),
    'surface_stroke_length': Channel('surface_stroke_length', float, 1.0, 3600),
    'downhole_adjusted_gross_stroke': Channel('downhole_adjusted_gross_stroke', float, 1.0, 3600),
    'stroke_production': Channel('stroke_production', float, 0.001, 3600),
}
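# change thresholds differ per tag, presumably tuned to each signal's typical
# range (loads move in 500 lb steps, SPM in 0.5, etc.); guarantee_sec=3600
# still forces an upload at least hourly so a flat-lining tag stays visible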

dt_channels = {  # Current Daily Totals
    'Electricity_Cost': Channel('dt_electricity_cost', float, 1.0, 3600),
    'Downhole_Net_Stroke': Channel('dt_downhole_net_stroke', float, 2.0, 3600),
    'Tubing_Movement': Channel('dt_tubing_movement', float, 1.0, 3600),
    'Average_SPM': Channel('dt_average_spm', float, 0.50, 3600),
    'Peak_Load': Channel('dt_peak_load', float, 500.0, 3600),
    'kWh': Channel('dt_kWh', float, 5.0, 3600),
    'Pump_HP': Channel('dt_pump_hp', float, 0.5, 3600),
    'Percent_Run': Channel('dt_percent_run', float, 5.0, 3600),
    'Projected_Production': Channel('dt_projected_production', float, 5.0, 3600),
    'Pump_Intake_Presure': Channel('dt_pump_intake_pressure', float, 100.0, 3600),
    'Inflow_Rate': Channel('dt_inflow_rate', float, 1.0, 3600),
    'Calculated_Production': Channel('dt_calculated_production', float, 5.0, 3600),
    'Fluid_Level': Channel('dt_fluid_level', float, 100.0, 3600),
    'Lifting_Cost': Channel('dt_lifting_cost', float, 1.0, 3600),
    'Polished_Rod_HP': Channel('dt_polished_rod_hp', float, 1.0, 3600),
    'kWh_Regen': Channel('dt_kWh_regen', float, 1.0, 3600),
    'Surface_Stroke_Length': Channel('dt_surface_stroke_length', float, 1.0, 3600),
    'Full_Card_Production': Channel('dt_full_card_production', float, 10.0, 3600),
    'Min_Load': Channel('dt_min_load', float, 500.0, 3600),
}

class start(threading.Thread, deviceBase):

    def __init__(self, name=None, number=None, mac=None, Q=None, mcu=None, companyId=None, offset=None, mqtt=None, Nodes=None):
        threading.Thread.__init__(self)
        deviceBase.__init__(self, name=name, number=number, mac=mac, Q=Q, mcu=mcu, companyId=companyId, offset=offset, mqtt=mqtt, Nodes=Nodes)

        self.daemon = True
        self.forceSend = True
        self.version = "3"
        self.device_address = "http://192.168.1.30"
        # self.device_address = "http://localhost"
        self.cardLoopTimer = 600
        self.finished = threading.Event()
        threading.Thread.start(self)
        self.statusChanged = False
        self.al_status_last = False
        self.dl_status_last = False

        # load stored event IDs
        try:
            with open('eventIds.p', 'rb') as handle:
                self.eventIds = pickle.load(handle)

            print "found pickled eventID dictionary: {0}".format(self.eventIds)
        except:
            print "couldn't load event IDs from pickle"
            self.eventIds = []

        # load stored well test IDs
        try:
            with open('welltestIDs.p', 'rb') as handle:
                self.welltestIDs = pickle.load(handle)

            print "found pickled welltestIDs dictionary: {0}".format(self.welltestIDs)
        except:
            print "couldn't load well test IDs from pickle"
            self.welltestIDs = []

        # load stored fluid shot IDs
        try:
            with open('fluidshotIDs.p', 'rb') as handle:
                self.fluidshotIDs = pickle.load(handle)

            print "found pickled fluidshotIDs dictionary: {0}".format(self.fluidshotIDs)
        except:
            print "couldn't load fluid shot IDs from pickle"
            self.fluidshotIDs = []

        # load stored note IDs
        try:
            with open('noteIDs.p', 'rb') as handle:
                self.noteIDs = pickle.load(handle)

            print "found pickled noteID dictionary: {0}".format(self.noteIDs)
        except:
            print "couldn't load note IDs from pickle"
            self.noteIDs = []

        # load stored well configs
        try:
            with open('wellSetup.p', 'rb') as handle:
                self.wellSetup = pickle.load(handle)

            print "Found pickled Well Setup (but it's going to be too long to print)"
            # print self.wellConfig
        except:
            print "couldn't load Well Setup from pickle"
            self.wellSetup = []

        self.sendtodbJSON("device_address", self.device_address, 0)

    # this is a required function for all drivers; its goal is to upload some piece of data
    # about your device so it can be seen on the web
    def register(self):
        # this file defines Channel objects rather than a "channels" dict like the
        # other drivers, so referencing channels["status"] here raised a NameError;
        # reset the status Channel instead
        status.last_value = ""

    def run(self):
        self.runLoopStatus = ""
        checkBackupEvery = 100
        checkBackupSkipped = 1
        while True:
            try:
                if self.forceSend:
                    print("!!!!!!!!!!!!!!! FORCE SEND !!!!!!!!!!!!!!!")

                runLoopStatus = "checkStatus"
                self.checkStatus()

                runLoopStatus = "checkEvents"
                self.checkEvents()

                runLoopStatus = "checkNotes"
                self.checkNotes()

                runLoopStatus = "checkWellTests"
                self.checkWellTests()

                runLoopStatus = "checkFluidShots"
                self.checkFluidShots()

                runLoopStatus = "checkDailyTotals"
                self.checkDailyTotals()

                runLoopStatus = "checkGaugeOffData"
                self.checkGaugeOffData()

                runLoopStatus = "checkStoredValues"
                self.checkStoredValues(self.forceSend)

                # runLoopStatus = "getDataLoggerStatus()"
                # self.getDataLoggerStatus()

                if self.statusChanged:
                    runLoopStatus = "getLatestXCards"
                    self.forceSend = True
                    self.checkLatestCard(5)
                else:
                    runLoopStatus = "checkLatestCard"
                    self.checkLatestCard()

                # if self.forceSend or (checkBackupSkipped > checkBackupEvery):
                #     runLoopStatus = "checkBackup"
                #     self.checkBackup()
                #     checkBackupSkipped = 0
                # checkBackupSkipped = checkBackupSkipped + 1

                runLoopStatus = "Complete"
                time.sleep(3)
                self.forceSend = False
            except Exception, e:
                sleep_timer = 20
                print "Error during {0} of run loop: {1}\nWill try again in {2} seconds...".format(runLoopStatus, e, sleep_timer)
                traceback.print_exc()
                time.sleep(sleep_timer)
    def checkBackup(self):
        try:
            backupList = json.loads(requests.get(self.device_address + "/json/backups").text)
            file = backupList["backups"][0]
            data = json.loads(requests.get(self.device_address + "/json/backups/" + file).text)
            timestamp = time.time()
            if data != self.wellSetup or self.forceSend:
                self.sendtodbJSON("well_setup", json.dumps(data), timestamp)
                self.wellSetup = data
                with open('wellSetup.p', 'wb') as handle:
                    pickle.dump(self.wellSetup, handle)
        except Exception, e:
            print("checkBackup Error: {}".format(e))

    def checkEvents(self):
        try:
            data = json.loads(requests.get(self.device_address + "/json/event_list").text)
            events = data["events"]
            for event in events:
                if int(event["id"]) not in self.eventIds:
                    timestamp = event["datetime"]
                    # we have a new event
                    self.sendtodbJSON("events", json.dumps(event), timestamp)
                    self.eventIds.append(int(event["id"]))
                    if len(self.eventIds) > 50:
                        del self.eventIds[0]
                    with open('eventIds.p', 'wb') as handle:
                        pickle.dump(self.eventIds, handle)
        except Exception, e:
            print("checkEvents Error: {}".format(e))
def checkNotes(self):
    try:
        data = json.loads(requests.get(self.device_address + "/json/notes/get").text)
        notes = data["notes"]
        for note in notes:
            if int(note["id"]) not in self.noteIDs:
                timestamp = calendar.timegm(time.strptime(note["date_time"], '%Y-%m-%d %H:%M:%S'))
                # we have a new note
                self.sendtodbJSON("notes", json.dumps(note), timestamp)
                self.noteIDs.append(int(note["id"]))
                if len(self.noteIDs) > 50:
                    del self.noteIDs[0]
                with open('noteIDs.p', 'wb') as handle:
                    pickle.dump(self.noteIDs, handle)
    except Exception as e:
        print("checkNotes Error: {}".format(e))
def checkFluidShots(self):
    try:
        data = json.loads(requests.get(self.device_address + "/json/fluid_shot/get").text)
        fluid_shots = data["fluid_shots"]
        for shot in fluid_shots:
            if int(shot["id"]) not in self.fluidshotIDs:
                timestamp = calendar.timegm(time.strptime(shot["shot_datetime"], '%Y-%m-%d %H:%M:%S'))
                # we have a new fluid shot
                self.sendtodbJSON("fluidshots", json.dumps(shot), timestamp)
                self.fluidshotIDs.append(int(shot["id"]))
                if len(self.fluidshotIDs) > 50:
                    del self.fluidshotIDs[0]
                with open('fluidshotIDs.p', 'wb') as handle:
                    pickle.dump(self.fluidshotIDs, handle)
    except Exception as e:
        print("checkFluidShots Error: {}".format(e))
def checkWellTests(self):
    try:
        data = json.loads(requests.get(self.device_address + "/json/well_test/get").text)
        well_tests = data["well_tests"]
        for test in well_tests:
            if int(test["id"]) not in self.welltestIDs:
                timestamp = calendar.timegm(time.strptime(test["test_date"], '%Y-%m-%d %H:%M:%S'))
                # we have a new well test
                self.sendtodbJSON("welltests", json.dumps(test), timestamp)
                self.welltestIDs.append(int(test["id"]))
                if len(self.welltestIDs) > 50:
                    del self.welltestIDs[0]
                with open('welltestIDs.p', 'wb') as handle:
                    pickle.dump(self.welltestIDs, handle)
    except Exception as e:
        print("checkWellTests Error: {}".format(e))
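checkNotes, checkFluidShots, and checkWellTests differ only in endpoint, list key, timestamp field, channel name, ID list, and pickle file. If the duplication ever becomes a maintenance problem, the three could collapse into one parameterized helper. A sketch under that assumption (the method name and parameters are invented here, and it relies on the module's existing json/requests/calendar/time/pickle imports):

def checkListEndpoint(self, endpoint, list_key, ts_field, channel, seen, pickle_path):
    # generic form of checkNotes / checkFluidShots / checkWellTests
    try:
        data = json.loads(requests.get(self.device_address + endpoint).text)
        for item in data[list_key]:
            if int(item["id"]) not in seen:
                timestamp = calendar.timegm(time.strptime(item[ts_field], '%Y-%m-%d %H:%M:%S'))
                self.sendtodbJSON(channel, json.dumps(item), timestamp)
                seen.append(int(item["id"]))
                if len(seen) > 50:
                    del seen[0]
                with open(pickle_path, 'wb') as handle:
                    pickle.dump(seen, handle)
    except Exception as e:
        print("{} Error: {}".format(channel, e))

# e.g. self.checkListEndpoint("/json/notes/get", "notes", "date_time",
#                             "notes", self.noteIDs, 'noteIDs.p')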
def checkStatus(self):
    try:
        global status
        statusMap = {
            0: 'Stopped',
            1: 'Running',
            2: 'Pumped Off',
            3: 'Faulted',
            4: 'Starting',
            5: 'Recovering',
            100: 'Read Error',
            1000: 'PLC Error',
            9999: 'No Response'
        }
        st_response = requests.get(self.device_address + "/json/status")
        if st_response.status_code == 200:
            data = json.loads(st_response.text)
            # date = data["ISOdate"]
            status_read = data["run_status"]

            if status.last_value != status_read:
                self.statusChanged = True
                print("Status has changed from {0} to {1} @ {2}".format(status.last_value, status_read, time.time()))
            else:
                self.statusChanged = False

            if self.statusChanged or self.forceSend:
                self.status = status_read
                # reg = "(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2}).(\d{3})Z"
                # fd = re.search(reg, date)
                # dt = datetime(int(fd.group(1)), int(fd.group(2)), int(fd.group(3)), int(fd.group(4)), int(fd.group(5)), int(fd.group(6)), int(fd.group(7)))
                # # timestamp = int(time.mktime(time.strptime(date, '%Y-%m-%dT%H:%M:%S.%fZ')))
                # timestamp = calendar.timegm(dt.timetuple())
                self.sendtodb("status", status_read, 0)
                status.last_value = status_read
    except Exception as e:
        print("checkStatus Error: {}".format(e))
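statusMap enumerates the numeric run_status codes the device can report, but checkStatus currently uploads and logs the raw number and never consults the map. If human-readable labels are wanted, the lookup is a one-liner; purely illustrative, not a change the original makes:

# translate a raw run_status code into its label, with a fallback for unknown codes
label = statusMap.get(status_read, 'Unknown ({})'.format(status_read))
print("Status has changed to {0}".format(label))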
def checkDailyTotals(self):
    try:
        data = json.loads(requests.get(self.device_address + "/json/totals").text)
        if data['status'] == "OK":
            totals = data["totals"]
            timestamp = 0
            for val in totals:
                if val['name'] in dt_channels:
                    if dt_channels[val['name']].checkSend(val['value'], False):
                        self.sendtodb(dt_channels[val['name']].mesh_name, dt_channels[val['name']].value, timestamp)
        else:
            # data is a dict parsed from JSON, so the error text is a key, not an attribute
            print("checkDailyTotals Error: {0}".format(data.get('message')))
    except Exception as e:
        print("checkDailyTotals Error: {}".format(e))
def checkGaugeOffData(self):
    try:
        data = json.loads(requests.get(self.device_address + "/json/history").text)
        day = data["hist"]
        date = day['gauge_date']

        reg = r"(\d{4})-(\d{2})-(\d{2}) (\d{2}):(\d{2}):(\d{2})"
        fd = re.search(reg, date)
        dt = datetime(int(fd.group(1)), int(fd.group(2)), int(fd.group(3)), int(fd.group(4)), int(fd.group(5)), int(fd.group(6)))
        # timestamp = int(time.mktime(time.strptime(date, '%Y-%m-%dT%H:%M:%S.%fZ')))
        timestamp = calendar.timegm(dt.timetuple())
        for entry in day:
            if entry in go_channels:
                if go_channels[entry].checkSend(day[entry], False):
                    self.sendtodb(go_channels[entry].mesh_name, day[entry], timestamp)
    except Exception as e:
        print("checkGaugeOffData Error: {}".format(e))
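The regex-plus-datetime dance here (repeated in checkLatestCard and getLatestXCards) converts a 'YYYY-MM-DD HH:MM:SS' string into a UTC epoch timestamp. Since the format has no fractional seconds or timezone suffix, the same result comes more directly from time.strptime feeding calendar.timegm, the exact pattern checkNotes already uses. A sketch of the equivalent helper:

import calendar
import time

def to_epoch(date_str):
    # parse 'YYYY-MM-DD HH:MM:SS' (treated as UTC) into an integer epoch timestamp
    return calendar.timegm(time.strptime(date_str, '%Y-%m-%d %H:%M:%S'))

print(to_epoch('2015-06-19 10:36:54'))  # -> 1434710214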
def checkLatestCard(self, numCards=1):
    try:
        latest = ""
        if numCards == 1:
            latest = json.loads(requests.get(self.device_address + "/json/latestcard").text)
        else:
            latest = json.loads(requests.get(self.device_address + "/json/latestcard/{}".format(numCards)).text)
        # check the card to see if it's new
        # 1. if it's new, send the folder/file_name to the card_history channel
        # 2. if it's new and it's been 10 minutes since you last sent an entire card, then send up all of the data
        for i in range(0, len(latest['card_data'])):
            card = latest['card_data'][i]
            if card_channels['card_id'].checkSend(card['Card_ID'], self.forceSend):
                dateTime = str(card["Stroke_Time"])

                reg = r"(\d{4})-(\d{2})-(\d{2}) (\d{2}):(\d{2}):(\d{2})"
                fd = re.search(reg, dateTime)
                dt = datetime(int(fd.group(1)), int(fd.group(2)), int(fd.group(3)), int(fd.group(4)), int(fd.group(5)), int(fd.group(6)))
                timestamp = calendar.timegm(dt.timetuple())
                card_timestamp = int(time.mktime(dt.timetuple()))

                print("New card detected @ {0}".format(datetime.strftime(datetime.fromtimestamp(timestamp), "%Y-%m-%d %H:%M:%S.%f")))
                # set the last value equal to the current value and upload the data
                self.sendtodb("card_history", card_channels['card_id'].value, timestamp)

                # check the last time the card was updated
                if (time.time() - card_channels['card'].last_send_time) > self.cardLoopTimer or self.statusChanged or self.forceSend:
                    # it's been 10 minutes (or status changed / forced): send the full
                    # upload, then return -- only one full card goes up per pass
                    print("Either status has changed or last stored card is too old.")
                    card_channels["card"].last_send_time = time.time()
                    self.process_card(card, timestamp, card_timestamp, sendCards=True)
                    return
                else:
                    self.process_card(card, timestamp, card_timestamp, sendCards=False)
    except Exception as e:
        print("checkLatestCard Error: {}".format(e))
def process_card(self, data, data_timestamp, card_timestamp, sendCards=False):

    # if sendCards is True then we upload all data no matter what, including cards

    # check what type of data it is
    # check if it's changed; if it has, how long has it been since it changed
    # NOTE: the initial value of "" is given to all channels in the channels object,
    # so to avoid comparing a string to a float, and to make sure on startup we send
    # all of the values, the first time through we send everything that has "" as
    # its last value

    if sendCards:
        self.sendtodb("cardtype", str(data['Card_Type']), int(data_timestamp))

    s_p = data["Surface_Position"]
    s_l = data["Surface_Load"]
    d_p = data["Downhole_Position"]
    d_l = data["Downhole_Load"]
    newSc = "["
    newDc = "["

    for i in range(len(s_p)):
        try:
            if s_p[i] is None:
                continue
            if s_p[i] != 0.0 and s_l[i] != 0.0:
                newSc += "[" + str(round(s_p[i], 3)) + ", " + str(round(s_l[i], 3)) + "], "
        except:
            pass
    # append the first point again: this closes the card loop and avoids a trailing comma
    newSc += "[" + str(round(s_p[0], 3)) + ", " + str(round(s_l[0], 3)) + "]"
    newSc += "]"

    for i in range(len(d_p)):
        try:
            if d_p[i] is None:
                continue
            if d_p[i] != 0.0 and d_l[i] != 0.0:
                newDc += "[" + str(round(d_p[i], 3)) + ", " + str(round(d_l[i], 3)) + "], "
        except:
            pass
    newDc += "[" + str(round(d_p[0], 3)) + ", " + str(round(d_l[0], 3)) + "]"
    newDc += "]"

    self.sendtodb("sc", newSc, card_timestamp)
    self.sendtodb("dc", newDc, card_timestamp)
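Both card strings are built by hand into a JSON-style array of [position, load] pairs. The same string can be produced with json.dumps over a filtered list, which removes the trailing-comma bookkeeping; a sketch of that alternative (serialize_card is an invented name, and it additionally skips pairs whose load is None, which the hand-built loops only catch via the bare except):

import json

def serialize_card(positions, loads):
    # build the '[[pos, load], ...]' string that process_card assembles by hand
    pairs = [[round(p, 3), round(l, 3)]
             for p, l in zip(positions, loads)
             if p is not None and l is not None and p != 0.0 and l != 0.0]
    pairs.append([round(positions[0], 3), round(loads[0], 3)])  # close the loop, as the original does
    return json.dumps(pairs)

print(serialize_card([0.5, 1.2, 0.0], [1000.0, 1100.0, 0.0]))
# -> [[0.5, 1000.0], [1.2, 1100.0], [0.5, 1000.0]]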
def checkStoredValues(self, forceSend):
    try:
        data = json.loads(requests.get(self.device_address + "/json/tagvalues").text)
        if data['status'] == "OK":
            vals = data['vals']
            for val in vals:
                if val['name'] in tag_channels:
                    if tag_channels[val['name']].checkSend(val['val'], forceSend):
                        self.sendtodbJSON(tag_channels[val['name']].mesh_name, tag_channels[val['name']].value, 0)
    except Exception as e:
        print("checkStoredValues Error: {}".format(e))
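Every check method funnels values through channel objects (dt_channels, go_channels, tag_channels, card_channels) whose checkSend(value, force) gates the upload, and whose .value, .mesh_name, and .last_send_time members are read afterwards. The channel class itself sits outside this excerpt, so the sketch below only models the interface as used here; checkSend apparently stores the candidate value and returns True when it differs from the last sent value or when force is set:

import time

class Channel(object):
    # sketch of the channel interface assumed by the check* methods above;
    # only the members referenced in this file are modeled
    def __init__(self, mesh_name):
        self.mesh_name = mesh_name
        self.value = ""              # "" sentinel: always send on the first pass
        self.last_send_time = 0

    def checkSend(self, new_value, force):
        if force or self.value == "" or new_value != self.value:
            self.value = new_value
            return True
        return False

The "" sentinel matches the NOTE in process_card: on startup every channel holds "" so the first comparison always triggers a send.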
def getLatestXCards(self, numCards):
    try:
        data = json.loads(requests.get(self.device_address + "/json/latest/" + str(int(numCards))).text)
        for card in data['cards']:
            card_data = json.loads(requests.get(self.device_address + "/json/" + data['folder'] + "/" + card).text)
            dateTime = str(card_data["card_data"]["Stroke_Time"])
            # timestamp = time.mktime(time.strptime(dateTime,'%Y-%m-%dT%H:%M:%S.%fZ'))
            reg = r"(\d{4})-(\d{2})-(\d{2}) (\d{2}):(\d{2}):(\d{2})"
            fd = re.search(reg, dateTime)
            dt = datetime(int(fd.group(1)), int(fd.group(2)), int(fd.group(3)), int(fd.group(4)), int(fd.group(5)), int(fd.group(6)))
            timestamp = calendar.timegm(dt.timetuple())
            card_timestamp = int(time.mktime(dt.timetuple()))

            channels["card_history"]["last_value"] = (data['folder'] + "/" + card)
            self.sendtodb("card_history", (data['folder'] + "/" + card), card_timestamp)
            self.process_card(card_data, timestamp, card_timestamp, sendCards=True)
    except Exception as e:
        print("getLatestXCards Error: {}".format(e))
def getDataLoggerStatus(self):
    try:
        data = json.loads(requests.get(self.device_address + "/json/pythonstatus/").text)
        al_status = "Not OK"
        if data['status']['alarmLogger']:
            al_status = "OK"

        if al_status != self.al_status_last:
            self.sendtodb("alarmlogger_status", al_status, 0)
            self.al_status_last = al_status

        dl_status = "Not OK"
        if data['status']['dataLogger']:
            dl_status = "OK"
        if dl_status != self.dl_status_last:  # was al_status: a copy-paste bug comparing the wrong logger
            self.sendtodb("datalogger_status", dl_status, 0)
            self.dl_status_last = dl_status
    except Exception as e:
        print("getDataLoggerStatus Error: {}".format(e))
def poc_get_card(self, name, value):
    self.getcard(value)

def poc_sync(self, name, value):
    self.sendtodb("connected", "true", 0)
    return True

def poc_set_address(self, name, value):
    self.device_address = value
    return True

def poc_refresh_data(self, name, value):
    self.forceSend = True
    return True
BIN
VFD/POC VFD/POC VFD.ccwsln
Normal file
BIN
VFD/POC VFD/POC VFD.v12.ccwsuo
Normal file
1
VFD/POC VFD/UDCProject/Library.idf
Normal file
@@ -0,0 +1 @@
Powerflex755.iuux
BIN
VFD/POC VFD/UDCProject/Melinda254.iuux
Normal file
BIN
VFD/POC VFD/UDCProject/PowerFlex 755_1.iuux
Normal file
BIN
VFD/POC VFD/UDCProject/Powerflex755.iuux
Normal file
20
VFD/POC VFD/UDCProject/UDCProject.acfproj
Normal file
@@ -0,0 +1,20 @@
<?xml version="1.0" encoding="utf-8"?>
<Project DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
  <PropertyGroup>
    <Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration>
    <SchemaVersion>2.0</SchemaVersion>
    <ProjectGuid>{7f23e5b9-bff6-4757-9759-91eb01cf8811}</ProjectGuid>
    <OutputType>Exe</OutputType>
    <RootNamespace>MyRootNamespace</RootNamespace>
    <AssemblyName>MyAssemblyName</AssemblyName>
    <EnableUnmanagedDebugging>false</EnableUnmanagedDebugging>
    <CAMProjectFile>Library.idf</CAMProjectFile>
    <UniqueProjectId>{7f23e5b9-bff6-4757-9759-91eb01cf8811}</UniqueProjectId>
  </PropertyGroup>
  <PropertyGroup Condition=" '$(Configuration)' == 'Online' ">
    <OutputPath>bin\Online\</OutputPath>
  </PropertyGroup>
  <PropertyGroup Condition=" '$(Configuration)' == 'Simulation' " />
  <Import Project="$(DevEnvDir)\PackagesToLoad\Targets\ISaGRAF.ISaGRAF5.targets" />
  <Import Project="$(DevEnvDir)\PackagesToLoad\Targets\ISaGRAF.CCW.targets" />
</Project>