updated code for payload chunking and reorganized files

This commit is contained in:
Nico Melone
2023-12-14 13:16:36 -06:00
parent a75d279bf6
commit 14c29b6718
52 changed files with 7044 additions and 1824 deletions
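
The recurring change in this commit is a chunk_payload generator that splits one large ThingsBoard-style telemetry payload into bounded MQTT messages of at most chunk_size values each. A minimal sketch of its behavior, using the generator exactly as committed below and a hypothetical three-key payload with chunk_size shrunk to 2 for readability:

import json

def chunk_payload(payload, chunk_size=20):
    # Split payload["values"] into dicts of at most chunk_size entries,
    # repeating the shared timestamp on every chunk.
    chunked_values = list(payload["values"].items())
    for i in range(0, len(chunked_values), chunk_size):
        yield {
            "ts": payload["ts"],
            "values": dict(chunked_values[i:i+chunk_size])
        }

demo = {"ts": 1702580400000, "values": {"a": 1, "b": 2, "c": 3}}
for chunk in chunk_payload(demo, chunk_size=2):
    print(json.dumps(chunk))
# {"ts": 1702580400000, "values": {"a": 1, "b": 2}}
# {"ts": 1702580400000, "values": {"c": 3}}

The committed callers sleep two seconds between chunk publishes, presumably to avoid flooding the broker; the default of 20 values per message is the commit's own choice, not derived here.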

BIN
.DS_Store vendored

Binary file not shown.

BIN
Pub_Sub/.DS_Store vendored

Binary file not shown.

View File

@@ -290,10 +290,6 @@
{
"key": "MAC",
"value": "00:18:05:1e:94:f4"
},
{
"key": "MAC_UPPER",
"value": "00:18:05:1A:E5:57"
}
],
"quickfaas": {

File diff suppressed because one or more lines are too long

View File

@@ -289,10 +289,6 @@
{
"key": "MAC",
"value": "00:18:05:1e:95:14"
},
{
"key": "MAC_UPPER",
"value": "00:18:05:1A:E5:57"
}
],
"quickfaas": {

File diff suppressed because one or more lines are too long

View File

@@ -26,10 +26,10 @@ def sync():
for measure in controller["measures"]:
#publish measure
if measure["name"] in ["wellstatus","pidcontrolmode","downholesensorstatus","alarmflowrate","alarmintakepressure","alarmintaketemperature","alarmtubingpressure","alarmvfd","alarmlockout","alarmfluidlevel","runpermissive","startpermissive","last_vfd_fault_code","vfd_fault", "flowmeter_fault"]:
payload[measure["name"]] = convert_int(measure["name"], measure["value"])
payload[measure["name"]+ "_int"] = measure["value"]
payload["values"][measure["name"]] = convert_int(measure["name"], measure["value"])
payload["values"][measure["name"]+ "_int"] = measure["value"]
else:
payload[measure["name"]] = measure["value"]
payload["values"][measure["name"]] = measure["value"]
logger.debug("Sending on topic: {}".format(topic))
logger.debug("Sending value: {}".format(payload))
for chunk in chunk_payload(payload=payload):

File diff suppressed because one or more lines are too long

View File

@@ -26,10 +26,10 @@ def sync():
for measure in controller["measures"]:
#publish measure
if measure["name"] in ["wellstatus","pidcontrolmode","downholesensorstatus","alarmflowrate","alarmintakepressure","alarmintaketemperature","alarmtubingpressure","alarmvfd","alarmlockout","alarmfluidlevel","runpermissive","startpermissive","last_vfd_fault_code","vfd_fault", "flowmeter_fault"]:
payload[measure["name"]] = convert_int(measure["name"], measure["value"])
payload[measure["name"]+ "_int"] = measure["value"]
payload["values"][measure["name"]] = convert_int(measure["name"], measure["value"])
payload["values"][measure["name"]+ "_int"] = measure["value"]
else:
payload[measure["name"]] = measure["value"]
payload["values"][measure["name"]] = measure["value"]
logger.debug("Sending on topic: {}".format(topic))
logger.debug("Sending value: {}".format(payload))
for chunk in chunk_payload(payload=payload):

File diff suppressed because one or more lines are too long

View File

@@ -26,10 +26,10 @@ def sync():
for measure in controller["measures"]:
#publish measure
if measure["name"] in ["wellstatus","pidcontrolmode","downholesensorstatus","alarmflowrate","alarmintakepressure","alarmintaketemperature","alarmtubingpressure","alarmvfd","alarmlockout","alarmfluidlevel","runpermissive","startpermissive","last_vfd_fault_code","vfd_fault", "flowmeter_fault"]:
payload[measure["name"]] = convert_int(measure["name"], measure["value"])
payload[measure["name"]+ "_int"] = measure["value"]
payload["values"][measure["name"]] = convert_int(measure["name"], measure["value"])
payload["values"][measure["name"]+ "_int"] = measure["value"]
else:
payload[measure["name"]] = measure["value"]
payload["values"][measure["name"]] = measure["value"]
logger.debug("Sending on topic: {}".format(topic))
logger.debug("Sending value: {}".format(payload))
for chunk in chunk_payload(payload=payload):

View File

@@ -150,6 +150,7 @@ def sendData(message,wizard_api):
#publish(__topic__, json.dumps(message), __qos__)
try:
checkCredentialConfig()
publish("v1/devices/me/attributes", json.dumps({"latestReportTime": (round(dt.timestamp(dt.now())/600)*600)*1000}), __qos__)
except Exception as e:
logger.error(f"Error in checkCredentialConfig: {e}")
try:
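
The attribute added above stamps latestReportTime with the current time rounded to a 10-minute boundary, in epoch milliseconds. A worked example of the arithmetic (the sample time is illustrative):

from datetime import datetime as dt
now = dt.timestamp(dt.now())        # e.g. 1702580196.4 seconds
bucketed = round(now / 600) * 600   # nearest 600 s boundary -> 1702580400
ts_ms = bucketed * 1000             # epoch milliseconds -> 1702580400000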

View File

@@ -199,6 +199,7 @@ def sendData(message,wizard_api):
#publish(__topic__, json.dumps(message), __qos__)
try:
checkCredentialConfig()
publish("v1/devices/me/attributes", json.dumps({"latestReportTime": (round(dt.timestamp(dt.now())/600)*600)*1000}), __qos__)
except Exception as e:
logger.error(f"Error in checkCredentialConfig: {e}")
try:

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,201 @@
# Enter your python code.
import json, time
from common.Logger import logger
from quickfaas.remotebus import publish
from quickfaas.measure import recall
def sendAlarm(message):
logger.info(message)
payload = {}
payload["ts"] = time.time()*1000
payload["values"] = {message["measureName"]: message["value"]}
publish(__topic__, json.dumps(payload), __qos__)
sync()
def sync():
#get new values and send
payload = {"ts": time.time()*1000, "values": {}}
try:
data = recall()#json.loads(recall().decode("utf-8"))
except Exception as e:
logger.error(e)
logger.debug(data)
for controller in data:
for measure in controller["measures"]:
#publish measure
if measure["name"] in ["wellstatus","pidcontrolmode","downholesensorstatus","alarmflowrate","alarmintakepressure","alarmintaketemperature","alarmtubingpressure","alarmvfd","alarmlockout","alarmfluidlevel","runpermissive","startpermissive","last_vfd_fault_code","vfd_fault", "flowmeter_fault"]:
payload["values"][measure["name"]] = convert_int(measure["name"], measure["value"])
payload["values"][measure["name"]+ "_int"] = measure["value"]
else:
payload["values"][measure["name"]] = measure["value"]
logger.debug("Sending on topic: {}".format(__topic__))
logger.debug("Sending value: {}".format(payload))
publish(__topic__, json.dumps(payload), 1)
def convert_int(plc_tag, value):
well_status_codes = {
0: "Running",
1: "Pumped Off",
2: "Alarmed",
3: "Locked Out",
4: "Stopped"
}
pid_control_codes = {
0: "Flow",
1: "Fluid Level",
2: "Tubing Pressure",
3: "Manual"
}
downhole_codes = {
0: "OK",
1: "Connecting",
2: "Open Circuit",
3: "Shorted",
4: "Cannot Decode"
}
permissive_codes = {
0: "OK",
1: "Flow",
2: "Intake Pressure",
3: "Intake Temperature",
4: "Tubing Pressure",
5: "VFD",
6: "Fluid Level",
7: "Min. Downtime"
}
alarm_codes = {
0: "OK",
1: "Alarm"
}
alarm_vfd_codes = {
0: "OK",
1: "Locked Out"
}
vfd_fault_codes = {
0: "No Fault",
2: "Auxiliary Input",
3: "Power Loss",
4: "UnderVoltage",
5: "OverVoltage",
7: "Motor Overload",
8: "Heatsink OverTemp",
9: "Thermister OverTemp",
10: "Dynamic Brake OverTemp",
12: "Hardware OverCurrent",
13: "Ground Fault",
14: "Ground Warning",
15: "Load Loss",
17: "Input Phase Loss",
18: "Motor PTC Trip",
19: "Task Overrun",
20: "Torque Prove Speed Band",
21: "Output Phase Loss",
24: "Decel Inhibit",
25: "OverSpeed Limit",
26: "Brake Slipped",
27: "Torque Prove Conflict",
28: "TP Encls Confict",
29: "Analog In Loss",
33: "Auto Restarts Exhausted",
35: "IPM OverCurrent",
36: "SW OverCurrent",
38: "Phase U to Ground",
39: "Phase V to Ground",
40: "Phase W to Ground",
41: "Phase UV Short",
42: "Phase VW Short",
43: "Phase WU Short",
44: "Phase UNeg to Ground",
45: "Phase VNeg to Ground",
46: "Phase WNeg to Ground",
48: "System Defaulted",
49: "Drive Powerup",
51: "Clear Fault Queue",
55: "Control Board Overtemp",
59: "Invalid Code",
61: "Shear Pin 1",
62: "Shear Pin 2",
64: "Drive Overload",
66: "OW Torque Level",
67: "Pump Off",
71: "Port 1 Adapter",
72: "Port 2 Adapter",
73: "Port 3 Adapter",
74: "Port 4 Adapter",
75: "Port 5 Adapter",
76: "Port 6 Adapter",
77: "IR Volts Range",
78: "FluxAmps Ref Range",
79: "Excessive Load",
80: "AutoTune Aborted",
81: "Port 1 DPI Loss",
82: "Port 2 DPI Loss",
83: "Port 3 DPI Loss",
84: "Port 4 DPI Loss",
85: "Port 5 DPI Loss",
86: "Port 6 DPI Loss",
87: "IXo Voltage Range",
91: "Primary Velocity Feedback Loss",
93: "Hardware Enable Check",
94: "Alternate Velocity Feedback Loss",
95: "Auxiliary Velocity Feedback Loss",
96: "Position Feedback Loss",
97: "Auto Tach Switch",
100: "Parameter Checksum",
101: "Power Down NVS Blank",
102: "NVS Not Blank",
103: "Power Down NVS Incompatible",
104: "Power Board Checksum",
106: "Incompat MCB-PB",
107: "Replaced MCB-PB",
108: "Analog Calibration Checksum",
110: "Invalid Power Board Data",
111: "Power Board Invalid ID",
112: "Power Board App Min Version",
113: "Tracking DataError",
115: "Power Down Table Full",
116: "Power Down Entry Too Large",
117: "Power Down Data Checksum",
118: "Power Board Power Down Checksum",
124: "App ID Changed",
125: "Using Backup App",
134: "Start on Power Up",
137: "External Precharge Error",
138: "Precharge Open",
141: "Autotune Enc Angle",
142: "Autotune Speed Restricted",
143: "Autotune Current Regulator",
144: "Autotune Inertia",
145: "Autotune Travel",
13035: "Net IO Timeout",
13037: "Net IO Timeout"
}
plc_tags = {
"wellstatus": well_status_codes.get(value, "Invalid Code"),
"pidcontrolmode": pid_control_codes.get(value, "Invalid Code"),
"downholesensorstatus": downhole_codes.get(value, "Invalid Code"),
"alarmflowrate": alarm_codes.get(value, "Invalid Code"),
"alarmintakepressure": alarm_codes.get(value, "Invalid Code"),
"alarmintaketemperature": alarm_codes.get(value, "Invalid Code"),
"alarmtubingpressure": alarm_codes.get(value, "Invalid Code"),
"alarmvfd": alarm_codes.get(value, "Invalid Code"),
"alarmlockout": alarm_vfd_codes.get(value, "Invalid Code"),
"alarmfluidlevel": alarm_codes.get(value, "Invalid Code"),
"runpermissive": permissive_codes.get(value, "Invalid Code"),
"startpermissive": permissive_codes.get(value, "Invalid Code"),
"last_vfd_fault_code": vfd_fault_codes.get(value, "Invalid Code"),
"vfd_fault": vfd_fault_codes.get(value, "Invalid Code"),
"flowmeter_fault": alarm_codes.get(value, "Invalid Code")
}
return plc_tags.get(plc_tag, "Invalid Tag")
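
Example lookups through convert_int as defined above, with results read straight from its tables:

convert_int("wellstatus", 2)    # -> "Alarmed"
convert_int("vfd_fault", 4)     # -> "UnderVoltage"
convert_int("wellstatus", 99)   # -> "Invalid Code" (unknown code)
convert_int("frequency", 1)     # -> "Invalid Tag" (unmapped tag)

Note that every call rebuilds all of the code tables and evaluates a .get for each tag; hoisting the dicts to module scope would avoid that, though the lookup results would be identical.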

View File

@@ -0,0 +1,323 @@
# Enter your python code.
import json, os, time
from datetime import datetime as dt
from common.Logger import logger
from quickfaas.remotebus import publish
from quickfaas.global_dict import get as get_params
from quickfaas.global_dict import _set_global_args
def reboot(reason="Rebooting for config file update"):
#basic = Basic()
logger.info("!" * 10 + "REBOOTING DEVICE" + "!"*10)
logger.info(reason)
r = os.popen("kill -s SIGHUP `cat /var/run/python/supervisord.pid`").read()
logger.info(f"REBOOT : {r}")
def checkFileExist(filename):
path = "/var/user/files"
if not os.path.exists(path):
logger.debug("no folder making files folder in var/user")
os.makedirs(path)
with open(path + "/" + filename, "a") as f:
json.dump({}, f)
if not os.path.exists(path + "/" + filename):
logger.debug("no creds file making creds file")
with open(path + "/" + filename, "a") as f:
json.dump({}, f)
def convertDStoJSON(ds):
j = dict()
for x in ds:
j[x["key"]] = x["value"]
return j
def convertJSONtoDS(j):
d = []
for key in j.keys():
d.append({"key": key, "value": j[key]})
return d
def checkCredentialConfig():
logger.debug("CHECKING CONFIG")
cfgpath = "/var/user/cfg/device_supervisor/device_supervisor.cfg"
credspath = "/var/user/files/creds.json"
cfg = dict()
with open(cfgpath, "r") as f:
cfg = json.load(f)
clouds = cfg.get("clouds")
logger.debug(clouds)
#if not configured then try to configure from stored values
if clouds[0]["args"]["clientId"] == "unknown" or clouds[0]["args"]["username"] == "unknown" or not clouds[0]["args"]["passwd"] or clouds[0]["args"]["passwd"] == "unknown":
checkFileExist("creds.json")
with open(credspath, "r") as c:
creds = json.load(c)
if creds:
logger.debug("updating config with stored data")
clouds[0]["args"]["clientId"] = creds["clientId"]
clouds[0]["args"]["username"] = creds["userName"]
clouds[0]["args"]["passwd"] = creds["password"]
cfg["clouds"] = clouds
cfg = checkParameterConfig(cfg)
with open(cfgpath, "w", encoding='utf-8') as n:
json.dump(cfg, n, indent=1, ensure_ascii=False)
reboot()
else:
#assuming clouds is filled out, if data is different then assume someone typed in something new and store it, if creds is empty fill with clouds' data
checkFileExist("creds.json")
with open(credspath, "r") as c:
logger.debug("updating stored file with new data")
cfg = checkParameterConfig(cfg)
with open(cfgpath, "w", encoding='utf-8') as n:
json.dump(cfg, n, indent=1, ensure_ascii=False)
creds = json.load(c)
if creds:
if creds["clientId"] != clouds[0]["args"]["clientId"]:
creds["clientId"] = clouds[0]["args"]["clientId"]
if creds["userName"] != clouds[0]["args"]["username"]:
creds["userName"] = clouds[0]["args"]["username"]
if creds["password"] != clouds[0]["args"]["passwd"]:
creds["password"] = clouds[0]["args"]["passwd"]
else:
creds["clientId"] = clouds[0]["args"]["clientId"]
creds["userName"] = clouds[0]["args"]["username"]
creds["password"] = clouds[0]["args"]["passwd"]
with open(credspath, "w") as cw:
json.dump(creds,cw)
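# For reference, the shape of /var/user/files/creds.json maintained above
# (hypothetical values; the three keys are the ones this function reads
# and writes):
#   {"clientId": "gw-0001", "userName": "hp", "password": "hp"}
# On a boot where the cloud config still reads "unknown", these stored
# values are copied back into device_supervisor.cfg and reboot() restarts
# the supervisor to apply them.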
def checkParameterConfig(cfg):
logger.debug("Checking Parameters!!!!")
paramspath = "/var/user/files/params.json"
cfgparams = convertDStoJSON(cfg.get("labels"))
#check stored values
checkFileExist("params.json")
with open(paramspath, "r") as f:
logger.debug("Opened param storage file")
params = json.load(f)
if params:
if cfgparams != params:
#go through each param
#if not "unknown" and cfg and params aren't the same take from cfg likely updated manually
#if key in cfg but not in params copy to params
logger.debug("equalizing params between cfg and stored")
for key in cfgparams.keys():
try:
if cfgparams[key] != params[key] and cfgparams[key] != "unknown":
params[key] = cfgparams[key]
except:
params[key] = cfgparams[key]
cfg["labels"] = convertJSONtoDS(params)
_set_global_args(convertJSONtoDS(params))
with open(paramspath, "w") as p:
json.dump(params, p)
else:
with open(paramspath, "w") as p:
logger.debug("initializing param file with params in memory")
json.dump(convertDStoJSON(get_params()), p)
cfg["labels"] = get_params()
return cfg
# Helper function to split the payload into chunks
def chunk_payload(payload, chunk_size=20):
chunked_values = list(payload["values"].items())
for i in range(0, len(chunked_values), chunk_size):
yield {
"ts": payload["ts"],
"values": dict(chunked_values[i:i+chunk_size])
}
def sendData(message):
#logger.debug(message)
try:
checkCredentialConfig()
except Exception as e:
logger.error(e)
payload = {"ts": (round(dt.timestamp(dt.now())/600)*600)*1000, "values": {}}
for measure in message["measures"]:
try:
logger.debug(measure)
if abs(payload["ts"]/1000 - measure["timestamp"]) > 3600:
reboot(reason="Poll timestamp and actual timestamp out of sync. Actual: {} Poll: {}".format(payload["ts"]/1000,measure["timestamp"]))
if measure["name"] in ["wellstatus","pidcontrolmode","downholesensorstatus","alarmflowrate","alarmintakepressure","alarmintaketemperature","alarmtubingpressure","alarmvfd","alarmlockout","alarmfluidlevel","runpermissive","startpermissive","last_vfd_fault_code","vfd_fault", "flowmeter_fault"]:
logger.debug("Converting DINT/BOOL to STRING")
value = convert_int(measure["name"], measure["value"])
logger.debug("Converted {} to {}".format(measure["value"], value))
payload["values"][measure["name"]] = value
payload["values"][measure["name"] + "_int"] = measure["value"]
else:
payload["values"][measure["name"]] = measure["value"]
except Exception as e:
logger.error(e)
for chunk in chunk_payload(payload=payload):
publish(__topic__, json.dumps(chunk), __qos__)
time.sleep(2)
publish("v1/devices/me/attributes", json.dumps({"latestReportTime": (round(dt.timestamp(dt.now())/600)*600)*1000}), __qos__)
def convert_int(plc_tag, value):
well_status_codes = {
0: "Running",
1: "Pumped Off",
2: "Alarmed",
3: "Locked Out",
4: "Stopped"
}
pid_control_codes = {
0: "Flow",
1: "Fluid Level",
2: "Tubing Pressure",
3: "Manual"
}
downhole_codes = {
0: "OK",
1: "Connecting",
2: "Open Circuit",
3: "Shorted",
4: "Cannot Decode"
}
permissive_codes = {
0: "OK",
1: "Flow",
2: "Intake Pressure",
3: "Intake Temperature",
4: "Tubing Pressure",
5: "VFD",
6: "Fluid Level",
7: "Min. Downtime"
}
alarm_codes = {
0: "OK",
1: "Alarm"
}
alarm_vfd_codes = {
0: "OK",
1: "Locked Out"
}
vfd_fault_codes = {
0: "No Fault",
2: "Auxiliary Input",
3: "Power Loss",
4: "UnderVoltage",
5: "OverVoltage",
7: "Motor Overload",
8: "Heatsink OverTemp",
9: "Thermister OverTemp",
10: "Dynamic Brake OverTemp",
12: "Hardware OverCurrent",
13: "Ground Fault",
14: "Ground Warning",
15: "Load Loss",
17: "Input Phase Loss",
18: "Motor PTC Trip",
19: "Task Overrun",
20: "Torque Prove Speed Band",
21: "Output Phase Loss",
24: "Decel Inhibit",
25: "OverSpeed Limit",
26: "Brake Slipped",
27: "Torque Prove Conflict",
28: "TP Encls Confict",
29: "Analog In Loss",
33: "Auto Restarts Exhausted",
35: "IPM OverCurrent",
36: "SW OverCurrent",
38: "Phase U to Ground",
39: "Phase V to Ground",
40: "Phase W to Ground",
41: "Phase UV Short",
42: "Phase VW Short",
43: "Phase WU Short",
44: "Phase UNeg to Ground",
45: "Phase VNeg to Ground",
46: "Phase WNeg to Ground",
48: "System Defaulted",
49: "Drive Powerup",
51: "Clear Fault Queue",
55: "Control Board Overtemp",
59: "Invalid Code",
61: "Shear Pin 1",
62: "Shear Pin 2",
64: "Drive Overload",
66: "OW Torque Level",
67: "Pump Off",
71: "Port 1 Adapter",
72: "Port 2 Adapter",
73: "Port 3 Adapter",
74: "Port 4 Adapter",
75: "Port 5 Adapter",
76: "Port 6 Adapter",
77: "IR Volts Range",
78: "FluxAmps Ref Range",
79: "Excessive Load",
80: "AutoTune Aborted",
81: "Port 1 DPI Loss",
82: "Port 2 DPI Loss",
83: "Port 3 DPI Loss",
84: "Port 4 DPI Loss",
85: "Port 5 DPI Loss",
86: "Port 6 DPI Loss",
87: "IXo Voltage Range",
91: "Primary Velocity Feedback Loss",
93: "Hardware Enable Check",
94: "Alternate Velocity Feedback Loss",
95: "Auxiliary Velocity Feedback Loss",
96: "Position Feedback Loss",
97: "Auto Tach Switch",
100: "Parameter Checksum",
101: "Power Down NVS Blank",
102: "NVS Not Blank",
103: "Power Down NVS Incompatible",
104: "Power Board Checksum",
106: "Incompat MCB-PB",
107: "Replaced MCB-PB",
108: "Analog Calibration Checksum",
110: "Invalid Power Board Data",
111: "Power Board Invalid ID",
112: "Power Board App Min Version",
113: "Tracking DataError",
115: "Power Down Table Full",
116: "Power Down Entry Too Large",
117: "Power Down Data Checksum",
118: "Power Board Power Down Checksum",
124: "App ID Changed",
125: "Using Backup App",
134: "Start on Power Up",
137: "External Precharge Error",
138: "Precharge Open",
141: "Autotune Enc Angle",
142: "Autotune Speed Restricted",
143: "Autotune Current Regulator",
144: "Autotune Inertia",
145: "Autotune Travel",
13035: "Net IO Timeout",
13037: "Net IO Timeout"
}
plc_tags = {
"wellstatus": well_status_codes.get(value, "Invalid Code"),
"pidcontrolmode": pid_control_codes.get(value, "Invalid Code"),
"downholesensorstatus": downhole_codes.get(value, "Invalid Code"),
"alarmflowrate": alarm_codes.get(value, "Invalid Code"),
"alarmintakepressure": alarm_codes.get(value, "Invalid Code"),
"alarmintaketemperature": alarm_codes.get(value, "Invalid Code"),
"alarmtubingpressure": alarm_codes.get(value, "Invalid Code"),
"alarmvfd": alarm_codes.get(value, "Invalid Code"),
"alarmlockout": alarm_vfd_codes.get(value, "Invalid Code"),
"alarmfluidlevel": alarm_codes.get(value, "Invalid Code"),
"runpermissive": permissive_codes.get(value, "Invalid Code"),
"startpermissive": permissive_codes.get(value, "Invalid Code"),
"last_vfd_fault_code": vfd_fault_codes.get(value, "Invalid Code"),
"vfd_fault": vfd_fault_codes.get(value, "Invalid Code"),
"flowmeter_fault": alarm_codes.get(value, "Invalid Code")
}
return plc_tags.get(plc_tag, "Invalid Tag")

View File

@@ -0,0 +1,278 @@
import json, time
from datetime import datetime as dt
from quickfaas.measure import recall, write
from quickfaas.remotebus import publish
from common.Logger import logger
# Helper function to split the payload into chunks
def chunk_payload(payload, chunk_size=20):
chunked_values = list(payload["values"].items())
for i in range(0, len(chunked_values), chunk_size):
yield {
"ts": payload["ts"],
"values": dict(chunked_values[i:i+chunk_size])
}
def sync():
#get new values and send
payload = {"ts": round(dt.timestamp(dt.now()))*1000, "values": {}}
topic = "v1/devices/me/telemetry"
try:
data = recall()#json.loads(recall().decode("utf-8"))
except Exception as e:
logger.error(e)
logger.debug(data)
for controller in data:
for measure in controller["measures"]:
#publish measure
if measure["name"] in ["wellstatus","pidcontrolmode","downholesensorstatus","alarmflowrate","alarmintakepressure","alarmintaketemperature","alarmtubingpressure","alarmvfd","alarmlockout","alarmfluidlevel","runpermissive","startpermissive","last_vfd_fault_code","vfd_fault", "flowmeter_fault"]:
payload["values"][measure["name"]] = convert_int(measure["name"], measure["value"])
payload["values"][measure["name"]+ "_int"] = measure["value"]
else:
payload["values"][measure["name"]] = measure["value"]
logger.debug("Sending on topic: {}".format(topic))
logger.debug("Sending value: {}".format(payload))
for chunk in chunk_payload(payload=payload):
publish(topic, json.dumps(chunk), 1)
time.sleep(2)
def writeplctag(value):
#value in the form {"measurement": <measurement_name>, "value": <value to write>}
try:
#value = json.loads(value.replace("'",'"'))
logger.debug(value)
#payload format: [{"name": "advvfdipp", "measures": [{"name": "manualfrequencysetpoint", "value": 49}]}]
message = [{"name": "advvfdipp", "measures":[{"name":value["measurement"], "value": value["value"]}]}]
resp = write(message)
logger.debug("RETURN FROM WRITE: {}".format(resp))
return True
except Exception as e:
logger.debug(e)
return False
def receiveCommand(topic, payload):
try:
logger.debug(topic)
logger.debug(json.loads(payload))
p = json.loads(payload)
command = p["method"]
commands = {
"sync": sync,
"writeplctag": writeplctag,
}
if command == "setPLCTag":
try:
result = commands["writeplctag"](p["params"])
logger.debug(result)
except Exception as e:
logger.error(e)
elif command == "changeSetpoint":
try:
logger.debug("attempting controlpoint write")
params_type = {"measurement": "pidcontrolmode", "value": p["params"]["setpointType"]}
if params_type["value"]:
commands["writeplctag"](params_type)
time.sleep(2)
except Exception as e:
logger.error("DID NOT WRITE CONTROL MODE")
logger.error(e)
try:
logger.debug("attempting setpoint write")
modes = {0: "flowsetpoint", 1: "fluidlevelsetpoint", 2: "tubingpressuresetpoint", 3: "manualfrequencysetpoint"}
params_value = {"value": p["params"]["setpointValue"]}
if params_value["value"]:
params_value["measurement"] = modes[getMode()]
result = commands["writeplctag"](params_value)
logger.debug(result)
except Exception as e:
logger.error("DID NOT WRITE SETPOINT")
logger.error(e)
#logger.debug(command)
ack(topic.split("/")[-1])
time.sleep(5)
sync()
except Exception as e:
logger.debug(e)
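# Hypothetical RPC request this handler accepts (field names taken from the
# parsing above; the request id 42 is only an example):
#   topic:   v1/devices/me/rpc/request/42
#   payload: {"method": "changeSetpoint",
#             "params": {"setpointType": 1, "setpointValue": 55.0}}
# The handler writes pidcontrolmode=1, then (mode 1 mapping to
# fluidlevelsetpoint via getMode) writes fluidlevelsetpoint=55.0, acks on
# v1/devices/me/rpc/response/42, and re-syncs five seconds later.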
def ack(msgid):
#logger.debug(msgid)
#logger.debug(mac)
#logger.debug(name)
#logger.debug(value)
publish("v1/devices/me/rpc/response/" + str(msgid), json.dumps({"msg": {"time": time.time()}, "metadata": "", "msgType": ""}), 1)
def getMode():
try:
data = recall()
for controller in data:
for measure in controller["measures"]:
if measure["name"] == "pidcontrolmode":
return measure["value"]
except:
return None
def convert_int(plc_tag, value):
well_status_codes = {
0: "Running",
1: "Pumped Off",
2: "Alarmed",
3: "Locked Out",
4: "Stopped"
}
pid_control_codes = {
0: "Flow",
1: "Fluid Level",
2: "Tubing Pressure",
3: "Manual"
}
downhole_codes = {
0: "OK",
1: "Connecting",
2: "Open Circuit",
3: "Shorted",
4: "Cannot Decode"
}
permissive_codes = {
0: "OK",
1: "Flow",
2: "Intake Pressure",
3: "Intake Temperature",
4: "Tubing Pressure",
5: "VFD",
6: "Fluid Level",
7: "Min. Downtime"
}
alarm_codes = {
0: "OK",
1: "Alarm"
}
alarm_vfd_codes = {
0: "OK",
1: "Locked Out"
}
vfd_fault_codes = {
0: "No Fault",
2: "Auxiliary Input",
3: "Power Loss",
4: "UnderVoltage",
5: "OverVoltage",
7: "Motor Overload",
8: "Heatsink OverTemp",
9: "Thermister OverTemp",
10: "Dynamic Brake OverTemp",
12: "Hardware OverCurrent",
13: "Ground Fault",
14: "Ground Warning",
15: "Load Loss",
17: "Input Phase Loss",
18: "Motor PTC Trip",
19: "Task Overrun",
20: "Torque Prove Speed Band",
21: "Output Phase Loss",
24: "Decel Inhibit",
25: "OverSpeed Limit",
26: "Brake Slipped",
27: "Torque Prove Conflict",
28: "TP Encls Confict",
29: "Analog In Loss",
33: "Auto Restarts Exhausted",
35: "IPM OverCurrent",
36: "SW OverCurrent",
38: "Phase U to Ground",
39: "Phase V to Ground",
40: "Phase W to Ground",
41: "Phase UV Short",
42: "Phase VW Short",
43: "Phase WU Short",
44: "Phase UNeg to Ground",
45: "Phase VNeg to Ground",
46: "Phase WNeg to Ground",
48: "System Defaulted",
49: "Drive Powerup",
51: "Clear Fault Queue",
55: "Control Board Overtemp",
59: "Invalid Code",
61: "Shear Pin 1",
62: "Shear Pin 2",
64: "Drive Overload",
66: "OW Torque Level",
67: "Pump Off",
71: "Port 1 Adapter",
72: "Port 2 Adapter",
73: "Port 3 Adapter",
74: "Port 4 Adapter",
75: "Port 5 Adapter",
76: "Port 6 Adapter",
77: "IR Volts Range",
78: "FluxAmps Ref Range",
79: "Excessive Load",
80: "AutoTune Aborted",
81: "Port 1 DPI Loss",
82: "Port 2 DPI Loss",
83: "Port 3 DPI Loss",
84: "Port 4 DPI Loss",
85: "Port 5 DPI Loss",
86: "Port 6 DPI Loss",
87: "IXo Voltage Range",
91: "Primary Velocity Feedback Loss",
93: "Hardware Enable Check",
94: "Alternate Velocity Feedback Loss",
95: "Auxiliary Velocity Feedback Loss",
96: "Position Feedback Loss",
97: "Auto Tach Switch",
100: "Parameter Checksum",
101: "Power Down NVS Blank",
102: "NVS Not Blank",
103: "Power Down NVS Incompatible",
104: "Power Board Checksum",
106: "Incompat MCB-PB",
107: "Replaced MCB-PB",
108: "Analog Calibration Checksum",
110: "Invalid Power Board Data",
111: "Power Board Invalid ID",
112: "Power Board App Min Version",
113: "Tracking DataError",
115: "Power Down Table Full",
116: "Power Down Entry Too Large",
117: "Power Down Data Checksum",
118: "Power Board Power Down Checksum",
124: "App ID Changed",
125: "Using Backup App",
134: "Start on Power Up",
137: "External Precharge Error",
138: "Precharge Open",
141: "Autotune Enc Angle",
142: "Autotune Speed Restricted",
143: "Autotune Current Regulator",
144: "Autotune Inertia",
145: "Autotune Travel",
13035: "Net IO Timeout",
13037: "Net IO Timeout"
}
plc_tags = {
"wellstatus": well_status_codes.get(value, "Invalid Code"),
"pidcontrolmode": pid_control_codes.get(value, "Invalid Code"),
"downholesensorstatus": downhole_codes.get(value, "Invalid Code"),
"alarmflowrate": alarm_codes.get(value, "Invalid Code"),
"alarmintakepressure": alarm_codes.get(value, "Invalid Code"),
"alarmintaketemperature": alarm_codes.get(value, "Invalid Code"),
"alarmtubingpressure": alarm_codes.get(value, "Invalid Code"),
"alarmvfd": alarm_codes.get(value, "Invalid Code"),
"alarmlockout": alarm_vfd_codes.get(value, "Invalid Code"),
"alarmfluidlevel": alarm_codes.get(value, "Invalid Code"),
"runpermissive": permissive_codes.get(value, "Invalid Code"),
"startpermissive": permissive_codes.get(value, "Invalid Code"),
"last_vfd_fault_code": vfd_fault_codes.get(value, "Invalid Code"),
"vfd_fault": vfd_fault_codes.get(value, "Invalid Code"),
"flowmeter_fault": alarm_codes.get(value, "Invalid Code")
}
return plc_tags.get(plc_tag, "Invalid Tag")

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -1,504 +0,0 @@
{
"controllers": [
{
"protocol": "EtherNet/IP",
"name": "plcpond",
"args": {},
"samplePeriod": 10,
"expired": 10000,
"endpoint": "192.168.1.12:44818"
}
],
"measures": [
{
"name": "pond_1_level",
"ctrlName": "plcpond",
"group": "default",
"uploadType": "periodic",
"dataType": "FLOAT",
"addr": "Pond_1_Lev",
"decimal": 2,
"len": 1,
"readWrite": "ro",
"unit": "",
"desc": "",
"transformType": 0,
"maxValue": "",
"minValue": "",
"maxScaleValue": "",
"minScaleValue": "",
"gain": "",
"offset": ""
},
{
"name": "pond_1_total_bbls",
"ctrlName": "plcpond",
"group": "default",
"uploadType": "periodic",
"dataType": "FLOAT",
"addr": "Pond_1_Total_Barrels",
"decimal": 2,
"len": 1,
"readWrite": "ro",
"unit": "",
"desc": "",
"transformType": 0,
"maxValue": "",
"minValue": "",
"maxScaleValue": "",
"minScaleValue": "",
"gain": "",
"offset": ""
},
{
"name": "pond_1_hi_alm",
"ctrlName": "plcpond",
"group": "default",
"uploadType": "periodic",
"dataType": "BIT",
"addr": "Pond_1_Hi_Alarm",
"decimal": 2,
"len": 1,
"readWrite": "ro",
"unit": "",
"desc": "",
"transformType": 0,
"maxValue": "",
"minValue": "",
"maxScaleValue": "",
"minScaleValue": "",
"gain": "",
"offset": ""
},
{
"name": "pond_1_hi_spt",
"ctrlName": "plcpond",
"group": "default",
"uploadType": "periodic",
"dataType": "FLOAT",
"addr": "Pond_1_Hi_Setpoint",
"decimal": 2,
"len": 1,
"readWrite": "rw",
"unit": "",
"desc": "",
"transformType": 0,
"maxValue": "",
"minValue": "",
"maxScaleValue": "",
"minScaleValue": "",
"gain": "",
"offset": ""
},
{
"name": "pond_1_hi_clr_spt",
"ctrlName": "plcpond",
"group": "default",
"uploadType": "periodic",
"dataType": "FLOAT",
"addr": "Pond_1_Hi_Clr_Setpoint",
"decimal": 2,
"len": 1,
"readWrite": "rw",
"unit": "",
"desc": "",
"transformType": 0,
"maxValue": "",
"minValue": "",
"maxScaleValue": "",
"minScaleValue": "",
"gain": "",
"offset": ""
},
{
"name": "pond_1_lo_alm",
"ctrlName": "plcpond",
"group": "default",
"uploadType": "periodic",
"dataType": "BIT",
"addr": "Pond_1_Lo_Alarm",
"decimal": 2,
"len": 1,
"readWrite": "ro",
"unit": "",
"desc": "",
"transformType": 0,
"maxValue": "",
"minValue": "",
"maxScaleValue": "",
"minScaleValue": "",
"gain": "",
"offset": ""
},
{
"name": "pond_1_lo_spt",
"ctrlName": "plcpond",
"group": "default",
"uploadType": "periodic",
"dataType": "FLOAT",
"addr": "Pond_1_Lo_Setpoint",
"decimal": 2,
"len": 1,
"readWrite": "rw",
"unit": "",
"desc": "",
"transformType": 0,
"maxValue": "",
"minValue": "",
"maxScaleValue": "",
"minScaleValue": "",
"gain": "",
"offset": ""
},
{
"name": "pond_1_lo_clr_spt",
"ctrlName": "plcpond",
"group": "default",
"uploadType": "periodic",
"dataType": "FLOAT",
"addr": "Pond_1_Lo_Clr_Setpoint",
"decimal": 2,
"len": 1,
"readWrite": "rw",
"unit": "",
"desc": "",
"transformType": 0,
"maxValue": "",
"minValue": "",
"maxScaleValue": "",
"minScaleValue": "",
"gain": "",
"offset": ""
},
{
"name": "pond_2_level",
"ctrlName": "plcpond",
"group": "default",
"uploadType": "periodic",
"dataType": "FLOAT",
"addr": "Pond_2_Lev",
"decimal": 2,
"len": 1,
"readWrite": "ro",
"unit": "",
"desc": "",
"transformType": 0,
"maxValue": "",
"minValue": "",
"maxScaleValue": "",
"minScaleValue": "",
"gain": "",
"offset": ""
},
{
"name": "pond_2_total_bbls",
"ctrlName": "plcpond",
"group": "default",
"uploadType": "periodic",
"dataType": "FLOAT",
"addr": "Pond_2_Total_Barrels",
"decimal": 2,
"len": 1,
"readWrite": "ro",
"unit": "",
"desc": "",
"transformType": 0,
"maxValue": "",
"minValue": "",
"maxScaleValue": "",
"minScaleValue": "",
"gain": "",
"offset": ""
},
{
"name": "pond_2_hi_alm",
"ctrlName": "plcpond",
"group": "default",
"uploadType": "periodic",
"dataType": "BIT",
"addr": "Pond_2_Hi_Alarm",
"decimal": 2,
"len": 1,
"readWrite": "ro",
"unit": "",
"desc": "",
"transformType": 0,
"maxValue": "",
"minValue": "",
"maxScaleValue": "",
"minScaleValue": "",
"gain": "",
"offset": ""
},
{
"name": "pond_2_hi_spt",
"ctrlName": "plcpond",
"group": "default",
"uploadType": "periodic",
"dataType": "FLOAT",
"addr": "Pond_2_Hi_Setpoint",
"decimal": 2,
"len": 1,
"readWrite": "rw",
"unit": "",
"desc": "",
"transformType": 0,
"maxValue": "",
"minValue": "",
"maxScaleValue": "",
"minScaleValue": "",
"gain": "",
"offset": ""
},
{
"name": "pond_2_hi_clr_spt",
"ctrlName": "plcpond",
"group": "default",
"uploadType": "periodic",
"dataType": "FLOAT",
"addr": "Pond_2_Hi_Clr_Setpoint",
"decimal": 2,
"len": 1,
"readWrite": "rw",
"unit": "",
"desc": "",
"transformType": 0,
"maxValue": "",
"minValue": "",
"maxScaleValue": "",
"minScaleValue": "",
"gain": "1.0",
"offset": "0.0"
},
{
"name": "pond_2_lo_alm",
"ctrlName": "plcpond",
"group": "default",
"uploadType": "periodic",
"dataType": "BIT",
"addr": "Pond_2_Lo_Alarm",
"decimal": 2,
"len": 1,
"readWrite": "ro",
"unit": "",
"desc": "",
"transformType": 0,
"maxValue": "",
"minValue": "",
"maxScaleValue": "",
"minScaleValue": "",
"gain": "",
"offset": ""
},
{
"name": "pond_2_lo_spt",
"ctrlName": "plcpond",
"group": "default",
"uploadType": "periodic",
"dataType": "FLOAT",
"addr": "Pond_2_Lo_Setpoint",
"decimal": 2,
"len": 1,
"readWrite": "rw",
"unit": "",
"desc": "",
"transformType": 0,
"maxValue": "",
"minValue": "",
"maxScaleValue": "",
"minScaleValue": "",
"gain": "",
"offset": ""
},
{
"name": "pond_2_lo_clr_spt",
"ctrlName": "plcpond",
"group": "default",
"uploadType": "periodic",
"dataType": "FLOAT",
"addr": "Pond_2_Lo_Clr_Setpoint",
"decimal": 2,
"len": 1,
"readWrite": "rw",
"unit": "",
"desc": "",
"transformType": 0,
"maxValue": "",
"minValue": "",
"maxScaleValue": "",
"minScaleValue": "",
"gain": "",
"offset": ""
}
],
"alarmLables": [
"default"
],
"alarms": [],
"groups": [
{
"name": "default",
"uploadInterval": 600,
"reference": 16
}
],
"misc": {
"maxAlarmRecordSz": 2000,
"logLvl": "INFO",
"coms": [
{
"name": "rs232",
"baud": 9600,
"bits": 8,
"stopbits": 1,
"parityChk": "n"
},
{
"name": "rs485",
"baud": 9600,
"bits": 8,
"stopbits": 1,
"parityChk": "n"
}
]
},
"clouds": [
{
"cacheSize": 100,
"enable": 1,
"name": "default",
"type": "Standard MQTT",
"args": {
"host": "hp.henrypump.cloud",
"port": 1883,
"clientId": "hp",
"auth": 1,
"tls": 0,
"cleanSession": 0,
"mqttVersion": "v3.1.1",
"keepalive": 60,
"key": "",
"cert": "",
"rootCA": "",
"verifyServer": 0,
"verifyClient": 0,
"username": "hp",
"passwd": "hp",
"authType": 1
}
}
],
"quickfaas": {
"genericFuncs": [],
"uploadFuncs": [
{
"name": "sendData",
"trigger": "measure_event",
"topic": "v1/devices/me/telemetry",
"qos": 1,
"groups": [
"default"
],
"funcName": "sendData",
"script": "# Enter your python code.\nimport json\nimport time\nfrom common.Logger import logger\nfrom quickfaas.remotebus import publish\n\n\ndef sendData(message):\n payload = {}\n payload[\"ts\"] = round(time.time() * 1000)\n payload[\"values\"] = {}\n for measure in message[\"measures\"]:\n try:\n logger.debug(measure)\n payload[\"values\"][measure[\"name\"]] = measure[\"value\"]\n except Exception as e:\n logger.error(e)\n publish(__topic__, json.dumps(payload), __qos__)",
"msgType": 0,
"cloudName": "default"
}
],
"downloadFuncs": []
},
"labels": [
{
"key": "SN",
"value": "GF5022223016120"
},
{
"key": "MAC",
"value": "00:18:05:21:b2:8a"
}
],
"modbusSlave": {
"enable": 0,
"protocol": "Modbus-TCP",
"port": 502,
"slaveAddr": 1,
"int16Ord": "ab",
"int32Ord": "abcd",
"float32Ord": "abcd",
"maxConnection": 5,
"mapping_table": []
},
"modbusRTUSlave": {
"enable": 0,
"protocol": "Modbus-RTU",
"coms": "rs485",
"slaveAddr": 1,
"int16Ord": "ab",
"int32Ord": "abcd",
"float32Ord": "abcd",
"mapping_table": []
},
"iec104Server": {
"enable": 0,
"cotSize": 2,
"port": 2404,
"serverList": [
{
"asduAddr": 1
}
],
"kValue": 12,
"wValue": 8,
"t0": 30,
"t1": 15,
"t2": 10,
"t3": 20,
"maximumLink": 5,
"timeSet": 1,
"byteOrder": "abcd",
"mapping_table": []
},
"iec104Client": {
"enable": 0,
"connectType": 2,
"serverAddr": "ipower.inhandcloud.cn",
"serverPort": 2404,
"communicationCode": "",
"protocol": 1,
"asduAddr": 1,
"tls": 0,
"mapping_table": {
"YX": [],
"YC": [],
"YK": []
}
},
"opcuaServer": {
"enable": 0,
"port": 4840,
"maximumLink": 5,
"securityMode": 0,
"identifierType": "String",
"mapping_table": []
},
"southMetadata": {},
"bindMetadata": {
"version": "",
"timestamp": ""
},
"bindConfig": {
"enable": 0,
"bind": {
"modelId": "",
"modelName": "",
"srcId": "",
"srcName": "",
"devId": "",
"devName": ""
},
"varGroups": [],
"variables": [],
"alerts": []
},
"version": "2.3.1"
}

View File

@@ -0,0 +1,33 @@
{
"timestamp": 1702477801,
"group_name": "default",
"values": {
"flowmeter": {
"totalizer_3_unit": {
"raw_data": 11,
"timestamp": 1702477799,
"status": 1
},
"totalizer_3": {
"raw_data": 8746.4500000000007,
"timestamp": 1702477799,
"status": 1
},
"totalizer_2": {
"raw_data": 8746.4400000000005,
"timestamp": 1702477799,
"status": 1
},
"totalizer_1": {
"raw_data": 8746.4400000000005,
"timestamp": 1702477799,
"status": 1
},
"flowrate": {
"raw_data": 10.0,
"timestamp": 1702477799,
"status": 1
}
}
}
}

View File

@@ -211,6 +211,8 @@ def sendData(message,wizard_api):
publish(__topic__, json.dumps(chunk), __qos__)
time.sleep(2)
publish("v1/devices/me/attributes", json.dumps({"latestReportTime": (round(dt.timestamp(dt.now())/600)*600)*1000}), __qos__)
if dayReset:
resetPayload["values"]["yesterday_volume"] = payload["values"]["day_volume"]
resetPayload["values"]["day_volume"] = 0

View File

@@ -0,0 +1,405 @@
import json, os, time, shutil
from datetime import datetime as dt
from common.Logger import logger
from quickfaas.remotebus import publish
from mobiuspi_lib.gps import GPS
from quickfaas.global_dict import get as get_params
from quickfaas.global_dict import _set_global_args
def reboot(reason="Rebooting for config file update"):
#basic = Basic()
logger.info("!" * 10 + "REBOOTING DEVICE" + "!"*10)
logger.info(reason)
r = os.popen("kill -s SIGHUP `cat /var/run/python/supervisord.pid`").read()
logger.info(f"REBOOT : {r}")
def checkFileExist(filename):
path = "/var/user/files"
try:
if not os.path.exists(path):
logger.debug("no folder making files folder in var/user")
os.makedirs(path)
with open(path + "/" + filename, "a") as f:
json.dump({}, f)
except Exception as e:
logger.error(f"Something went wrong in checkFileExist while making folder: {e}")
try:
if not os.path.exists(path + "/" + filename):
logger.debug("no creds file making creds file")
with open(path + "/" + filename, "a") as f:
json.dump({}, f)
except Exception as e:
logger.error(f"Something went wrong in checkFileExist wihle making file: {e}")
def convertDStoJSON(ds):
j = dict()
try:
for x in ds:
j[x["key"]] = x["value"]
except Exception as e:
logger.error(f"Something went wrong in convertDStoJSON: {e}")
return j
def convertJSONtoDS(j):
d = []
try:
for key in j.keys():
d.append({"key": key, "value": j[key]})
except Exception as e:
logger.error(f"Something went wrong in convertJSONtoDS: {e}")
return d
def checkCredentialConfig():
logger.debug("CHECKING CONFIG")
cfgpath = "/var/user/cfg/device_supervisor/device_supervisor.cfg"
credspath = "/var/user/files/creds.json"
cfg = dict()
with open(cfgpath, "r") as f:
try:
cfg = json.load(f)
clouds = cfg.get("clouds")
logger.debug(clouds)
#if not configured then try to configure from stored values
if clouds[0]["args"]["clientId"] == "unknown" or clouds[0]["args"]["username"] == "unknown" or not clouds[0]["args"]["passwd"] or clouds[0]["args"]["passwd"] == "unknown":
try:
checkFileExist("creds.json")
except Exception as e:
logger.error(f"Error in checkFileExist: {e}")
with open(credspath, "r") as c:
try:
creds = json.load(c)
if creds:
logger.debug("updating config with stored data")
clouds[0]["args"]["clientId"] = creds["clientId"]
clouds[0]["args"]["username"] = creds["userName"]
clouds[0]["args"]["passwd"] = creds["password"]
cfg["clouds"] = clouds
cfg = checkParameterConfig(cfg)
with open(cfgpath, "w", encoding='utf-8') as n:
json.dump(cfg, n, indent=1, ensure_ascii=False)
reboot()
except Exception as e:
logger.error(f"Error trying to load credentials from file: {e}")
else:
#assuming clouds is filled out, if data is different then assume someone typed in something new and store it, if creds is empty fill with clouds' data
checkFileExist("creds.json")
with open(credspath, "r") as c:
logger.debug("updating stored file with new data")
cfg = checkParameterConfig(cfg)
with open(cfgpath, "w", encoding='utf-8') as n:
json.dump(cfg, n, indent=1, ensure_ascii=False)
creds = json.load(c)
if creds:
if creds["clientId"] != clouds[0]["args"]["clientId"]:
creds["clientId"] = clouds[0]["args"]["clientId"]
if creds["userName"] != clouds[0]["args"]["username"]:
creds["userName"] = clouds[0]["args"]["username"]
if creds["password"] != clouds[0]["args"]["passwd"]:
creds["password"] = clouds[0]["args"]["passwd"]
else:
creds["clientId"] = clouds[0]["args"]["clientId"]
creds["userName"] = clouds[0]["args"]["username"]
creds["password"] = clouds[0]["args"]["passwd"]
with open(credspath, "w") as cw:
json.dump(creds,cw)
except Exception as e:
logger.error(f"Somethign went wrong in checkCredentialConfig: {e}")
def checkParameterConfig(cfg):
try:
logger.debug("Checking Parameters!!!!")
paramspath = "/var/user/files/params.json"
cfgparams = convertDStoJSON(cfg.get("labels"))
#check stored values
checkFileExist("params.json")
with open(paramspath, "r") as f:
logger.debug("Opened param storage file")
params = json.load(f)
if params:
if cfgparams != params:
#go through each param
#if not "unknown" and cfg and params aren't the same take from cfg likely updated manually
#if key in cfg but not in params copy to params
logger.debug("equalizing params between cfg and stored")
for key in cfgparams.keys():
try:
if cfgparams[key] != params[key] and cfgparams[key] != "unknown":
params[key] = cfgparams[key]
except:
params[key] = cfgparams[key]
cfg["labels"] = convertJSONtoDS(params)
_set_global_args(convertJSONtoDS(params))
with open(paramspath, "w") as p:
json.dump(params, p)
else:
with open(paramspath, "w") as p:
logger.debug("initializing param file with params in memory")
json.dump(convertDStoJSON(get_params()), p)
cfg["labels"] = get_params()
return cfg
except Exception as e:
logger.error(f"Something went wrong in checkParameterConfig: {e}")
os.system(f'rm {paramspath}')
return cfg
payload = {}
def initialize_totalizers():
return {
"day": 0,
"week": 0,
"month": 0,
"year": 0,
"lifetime": 0,
"dayHolding": 0,
"weekHolding": 0,
"monthHolding": 0,
"yearHolding": 0
}
def getTotalizers(file_path="/var/user/files/totalizers.json"):
"""
Retrieves totalizer data from a JSON file.
:param file_path: Path to the JSON file storing totalizer data.
:return: Dictionary containing totalizer values.
"""
try:
with open(file_path, "r") as t:
totalizers = json.load(t)
if not totalizers or not isinstance(totalizers, dict):
logger.info("Invalid data format in the file. Initializing totalizers.")
totalizers = initialize_totalizers()
except FileNotFoundError:
logger.info("File not found. Initializing totalizers.")
totalizers = initialize_totalizers()
except json.JSONDecodeError:
timestamp = dt.now().strftime("%Y%m%d_%H%M%S")
# Split the file path and insert the timestamp before the extension
file_name, file_extension = os.path.splitext(file_path)
backup_file_path = f"{file_name}_{timestamp}{file_extension}"
shutil.copyfile(file_path, backup_file_path)
logger.error(f"Error decoding JSON. A backup of the file is created at {backup_file_path}. Initializing totalizers.")
totalizers = initialize_totalizers()
return totalizers
# Helper function to split the payload into chunks
def chunk_payload(payload, chunk_size=20):
chunked_values = list(payload["values"].items())
for i in range(0, len(chunked_values), chunk_size):
yield {
"ts": payload["ts"],
"values": dict(chunked_values[i:i+chunk_size])
}
def sendData(message,wizard_api):
logger.debug(message)
checkCredentialConfig()
payload = {"ts": (round(dt.timestamp(dt.now())/600)*600)*1000, "values": {}}
resetPayload = {"ts": "", "values": {}}
dayReset, weekReset, monthReset, yearReset = False, False, False, False
for measure in message["values"]["flowmeter"].keys():
try:
if message["values"]["flowmeter"][measure]["status"] == 1:
if measure in ["totalizer_1"]:
payload["values"]["day_volume"], dayReset = totalizeDay(message["values"]["flowmeter"][measure]["raw_data"])
payload["values"]["week_volume"], weekReset = totalizeWeek(message["values"]["flowmeter"][measure]["raw_data"])
payload["values"]["month_volume"], monthReset = totalizeMonth(message["values"]["flowmeter"][measure]["raw_data"])
payload["values"]["year_volume"], yearReset = totalizeYear(message["values"]["flowmeter"][measure]["raw_data"])
payload["values"][measure] = message["values"]["flowmeter"][measure]["raw_data"]
except Exception as e:
logger.error(e)
try:
payload["values"]["latitude"], payload["values"]["longitude"], payload["values"]["speed"] = getGPS()
except:
logger.error("Could not get GPS coordinates")
for chunk in chunk_payload(payload=payload):
publish(__topic__, json.dumps(chunk), __qos__)
time.sleep(2)
publish("v1/devices/me/attributes", json.dumps({"latestReportTime": (round(dt.timestamp(dt.now())/600)*600)*1000}), __qos__)
if dayReset:
resetPayload["values"]["yesterday_volume"] = payload["values"]["day_volume"]
resetPayload["values"]["day_volume"] = 0
if weekReset:
resetPayload["values"]["last_week_volume"] = payload["values"]["week_volume"]
resetPayload["values"]["week_volume"] = 0
if monthReset:
resetPayload["values"]["last_month_volume"] = payload["values"]["month_volume"]
resetPayload["values"]["month_volume"] = 0
if yearReset:
resetPayload["values"]["last_year_volume"] = payload["values"]["year_volume"]
resetPayload["values"]["year_volume"] = 0
if resetPayload["values"]:
resetPayload["ts"] = 1 + (round(dt.timestamp(dt.now())/600)*600)*1000
publish(__topic__, json.dumps(resetPayload), __qos__)
def saveTotalizers(totalizers, file_path="/var/user/files/totalizers.json"):
"""
Saves totalizer data to a JSON file.
:param totalizers: Dictionary containing totalizer values to be saved.
:param file_path: Path to the JSON file where totalizer data will be saved.
"""
try:
with open(file_path, "w") as t:
json.dump(totalizers, t)
except (IOError, OSError, TypeError) as e:  # json has no JSONEncodeError; json.dump raises TypeError for unserializable data
logger.error(f"Error saving totalizers to {file_path}: {e}")
raise # Optionally re-raise the exception if it should be handled by the caller
def getGPS():
# Create a gps instance
gps = GPS()
# Retrieve GPS information
position_status = gps.get_position_status()
logger.debug("position_status: ")
logger.debug(position_status)
latitude = position_status["latitude"].split(" ")
longitude = position_status["longitude"].split(" ")
lat_dec = int(latitude[0][:-1]) + (float(latitude[1][:-1])/60)
lon_dec = int(longitude[0][:-1]) + (float(longitude[1][:-1])/60)
if latitude[2] == "S":
lat_dec = lat_dec * -1
if longitude[2] == "W":
lon_dec = lon_dec * -1
#lat_dec = round(lat_dec, 7)
#lon_dec = round(lon_dec, 7)
logger.info("HERE IS THE GPS COORDS")
logger.info(f"LATITUDE: {lat_dec}, LONGITUDE: {lon_dec}")
speedKnots = position_status["speed"].split(" ")
speedMPH = float(speedKnots[0]) * 1.151
return (f"{lat_dec:.8f}",f"{lon_dec:.8f}",f"{speedMPH:.2f}")
def totalizeDay(lifetime, max_retries=3, retry_delay=2):
"""
Update and save daily totalizers based on the lifetime value.
:param lifetime: The current lifetime total.
:param max_retries: Maximum number of save attempts.
:param retry_delay: Delay in seconds between retries.
:return: A tuple containing the calculated value and a boolean indicating if a reset occurred, or (None, False) if save fails.
"""
totalizers = getTotalizers()
now = dt.fromtimestamp(round(dt.timestamp(dt.now())/600)*600)
reset = False
value = lifetime - totalizers["dayHolding"]
if not int(now.strftime("%d")) == int(totalizers["day"]):
totalizers["dayHolding"] = lifetime
totalizers["day"] = int(now.strftime("%d"))
for attempt in range(max_retries):
try:
saveTotalizers(totalizers)
reset = True
return (value, reset)
except Exception as e:
logger.error(f"Attempt {attempt + 1} failed to save totalizers: {e}")
if attempt < max_retries - 1:
time.sleep(retry_delay)
else:
logger.error("All attempts to save totalizers failed.")
return (None, False)
return (value, reset)
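# Worked example (hypothetical numbers): with lifetime = 8746.44 and a
# stored dayHolding of 8700.00, day_volume = 46.44. On the first poll of a
# new calendar day the holding snaps to the current lifetime and reset=True
# is returned, so sendData above publishes yesterday_volume once via
# resetPayload and day_volume restarts from 0.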
def totalizeWeek(lifetime, max_retries=3, retry_delay=2):
"""
Update and save weekly totalizers based on the lifetime value.
:param lifetime: The current lifetime total.
:param max_retries: Maximum number of save attempts.
:param retry_delay: Delay in seconds between retries.
:return: A tuple containing the calculated value and a boolean indicating if a reset occurred, or (None, False) if save fails.
"""
totalizers = getTotalizers()
now = dt.fromtimestamp(round(dt.timestamp(dt.now())/600)*600)
reset = False
value = lifetime - totalizers["weekHolding"]
if (not now.strftime("%U") == totalizers["week"] and now.strftime("%a") == "Sun") or totalizers["week"] == 0:
totalizers["weekHolding"] = lifetime
totalizers["week"] = now.strftime("%U")
for attempt in range(max_retries):
try:
saveTotalizers(totalizers)
reset = True
return (value, reset)
except Exception as e:
logger.error(f"Attempt {attempt + 1} failed to save totalizers: {e}")
if attempt < max_retries - 1:
time.sleep(retry_delay)
else:
logger.error("All attempts to save totalizers failed.")
return (None, False)
return (value, reset)
def totalizeMonth(lifetime, max_retries=3, retry_delay=2):
"""
Update and save monthly totalizers based on the lifetime value.
:param lifetime: The current lifetime total.
:param max_retries: Maximum number of save attempts.
:param retry_delay: Delay in seconds between retries.
:return: A tuple containing the calculated value and a boolean indicating if a reset occurred, or (None, False) if save fails.
"""
totalizers = getTotalizers()
now = dt.fromtimestamp(round(dt.timestamp(dt.now())/600)*600)
reset = False
value = lifetime - totalizers["monthHolding"]
if not int(now.strftime("%m")) == int(totalizers["month"]):
totalizers["monthHolding"] = lifetime
totalizers["month"] = now.strftime("%m")
for attempt in range(max_retries):
try:
saveTotalizers(totalizers)
reset = True
return (value, reset)
except Exception as e:
logger.error(f"Attempt {attempt + 1} failed to save totalizers: {e}")
if attempt < max_retries - 1:
time.sleep(retry_delay)
else:
logger.error("All attempts to save totalizers failed.")
return (None, False)
return (value,reset)
def totalizeYear(lifetime, max_retries=3, retry_delay=2):
"""
Update and save yearly totalizers based on the lifetime value.
:param lifetime: The current lifetime total.
:param max_retries: Maximum number of save attempts.
:param retry_delay: Delay in seconds between retries.
:return: A tuple containing the calculated value and a boolean indicating if a reset occurred, or (None, False) if save fails.
"""
totalizers = getTotalizers()
now = dt.fromtimestamp(round(dt.timestamp(dt.now())/600)*600)
reset = False
value = lifetime - totalizers["yearHolding"]
if not int(now.strftime("%Y")) == int(totalizers["year"]):
totalizers["yearHolding"] = lifetime
totalizers["year"] = now.strftime("%Y")
for attempt in range(max_retries):
try:
saveTotalizers(totalizers)
reset = True
return (value, reset)
except Exception as e:
logger.error(f"Attempt {attempt + 1} failed to save totalizers: {e}")
if attempt < max_retries - 1:
time.sleep(retry_delay)
else:
logger.error("All attempts to save totalizers failed.")
return (None, False)
return (value, reset)

View File

@@ -1,21 +1,28 @@
# Enter your python code.
import json
import json, time
from datetime import datetime as dt
from common.Logger import logger
from quickfaas.remotebus import publish
def chunk_payload(payload, chunk_size=20):
for controller, data in payload.items():
for entry in data:
ts = entry['ts']
values = entry['values']
chunked_values = list(values.items())
for i in range(0, len(chunked_values), chunk_size):
yield {
"controller": controller,
"ts": ts,
"values": dict(chunked_values[i:i + chunk_size])
}
def chunk_payload(payload, chunk_size=20, is_attributes_payload=False):
if is_attributes_payload:
# For attributes payload, chunk the controllers
controllers = list(payload.items())
for i in range(0, len(controllers), chunk_size):
yield dict(controllers[i:i + chunk_size])
else:
# For data payload, chunk the values within each controller
for controller, data in payload.items():
for entry in data:
ts = entry['ts']
values = entry['values']
chunked_values = list(values.items())
for i in range(0, len(chunked_values), chunk_size):
yield {
"controller": controller,
"ts": ts,
"values": dict(chunked_values[i:i + chunk_size])
}
def sendData(message):
#logger.debug(message)
@@ -53,5 +60,11 @@ def sendData(message):
#logger.debug(payload)
publish(__topic__, json.dumps(payload), __qos__)
publish("v1/gateway/attributes", json.dumps(attributes_payload), __qos__)
for chunk in chunk_payload(payload=payload):
publish(__topic__, json.dumps(chunk), __qos__)
time.sleep(2)
for chunk in chunk_payload(payload=attributes_payload, is_attributes_payload=True):
publish("v1/gateway/attributes", json.dumps(attributes_payload), __qos__)
time.sleep(2)
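
A hedged sketch of the new attributes mode (hypothetical controllers, chunk_size shrunk to 2 for readability): with is_attributes_payload=True the generator chunks top-level controller entries instead of the values inside each controller:

attrs = {"c1": {"fw": "1.0"}, "c2": {"fw": "1.1"}, "c3": {"fw": "0.9"}}
for chunk in chunk_payload(attrs, chunk_size=2, is_attributes_payload=True):
    print(chunk)
# {'c1': {'fw': '1.0'}, 'c2': {'fw': '1.1'}}
# {'c3': {'fw': '0.9'}}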

View File

@@ -6,10 +6,9 @@ from quickfaas.remotebus import publish
from quickfaas.global_dict import get as get_params
from quickfaas.global_dict import _set_global_args
def reboot(reason="Rebooting for config file update"):
def reboot():
#basic = Basic()
logger.info("!" * 10 + "REBOOTING DEVICE" + "!"*10)
logger.info(reason)
r = os.popen("kill -s SIGHUP `cat /var/run/python/supervisord.pid`").read()
logger.info(f"REBOOT : {r}")
@@ -38,21 +37,21 @@ def convertJSONtoDS(j):
return d
def checkCredentialConfig():
logger.debug("CHECKING CONFIG")
logger.info("CHECKING CONFIG")
cfgpath = "/var/user/cfg/device_supervisor/device_supervisor.cfg"
credspath = "/var/user/files/creds.json"
cfg = dict()
with open(cfgpath, "r") as f:
cfg = json.load(f)
clouds = cfg.get("clouds")
logger.debug(clouds)
logger.info(clouds)
#if not configured then try to configure from stored values
if clouds[0]["args"]["clientId"] == "unknown" or clouds[0]["args"]["username"] == "unknown" or not clouds[0]["args"]["passwd"] or clouds[0]["args"]["passwd"] == "unknown":
checkFileExist("creds.json")
with open(credspath, "r") as c:
creds = json.load(c)
if creds:
logger.debug("updating config with stored data")
logger.info("updating config with stored data")
clouds[0]["args"]["clientId"] = creds["clientId"]
clouds[0]["args"]["username"] = creds["userName"]
clouds[0]["args"]["passwd"] = creds["password"]
@@ -65,7 +64,7 @@ def checkCredentialConfig():
#assuming clouds is filled out, if data is different then assume someone typed in something new and store it, if creds is empty fill with clouds' data
checkFileExist("creds.json")
with open(credspath, "r") as c:
logger.debug("updating stored file with new data")
logger.info("updating stored file with new data")
cfg = checkParameterConfig(cfg)
with open(cfgpath, "w", encoding='utf-8') as n:
json.dump(cfg, n, indent=1, ensure_ascii=False)
@@ -85,20 +84,20 @@ def checkCredentialConfig():
json.dump(creds,cw)
def checkParameterConfig(cfg):
logger.debug("Checking Parameters!!!!")
logger.info("Checking Parameters!!!!")
paramspath = "/var/user/files/params.json"
cfgparams = convertDStoJSON(cfg.get("labels"))
#check stored values
checkFileExist("params.json")
with open(paramspath, "r") as f:
logger.debug("Opened param storage file")
logger.info("Opened param storage file")
params = json.load(f)
if params:
if cfgparams != params:
#go through each param
#if not "unknown" and cfg and params aren't the same take from cfg likely updated manually
#if key in cfg but not in params copy to params
logger.debug("equalizing params between cfg and stored")
logger.info("equalizing params between cfg and stored")
for key in cfgparams.keys():
try:
if cfgparams[key] != params[key] and cfgparams[key] != "unknown":
@@ -111,7 +110,7 @@ def checkParameterConfig(cfg):
json.dump(params, p)
else:
with open(paramspath, "w") as p:
logger.debug("initializing param file with params in memory")
logger.info("initializing param file with params in memory")
json.dump(convertDStoJSON(get_params()), p)
cfg["labels"] = get_params()
@@ -133,26 +132,14 @@ def sendData(message):
except Exception as e:
logger.error(e)
payload = {"ts": (round(dt.timestamp(dt.now())/600)*600)*1000, "values": {}}
run_tanks = {}
for measure in message["measures"]:
try:
logger.debug(measure)
if abs(payload["ts"]/1000 - measure["timestamp"]) > 3600:
reboot(reason="Poll timestamp and actual timestamp out of sync. Actual: {} Poll: {}".format(payload["ts"]/1000,measure["timestamp"]))
if measure["name"] in ["oil_run_tank","water_run_tank"]:
run_tanks[measure["name"]] = measure["value"]
if "_level" in measure["name"]:
run_tanks[measure["name"]] = measure["value"]
payload["values"][measure["name"]] = measure["value"]
except Exception as e:
logger.error(e)
payload["values"]["oil_run_tank_level"] = run_tanks["oil_0" + str(run_tanks["oil_run_tank"]) + "_level"]
payload["values"]["water_run_tank_level"] = run_tanks["water_0" + str(run_tanks["water_run_tank"]) + "_level"]
for chunk in chunk_payload(payload=payload):
publish(__topic__, json.dumps(chunk), __qos__)
time.sleep(2)
publish("v1/devices/me/attributes", json.dumps({"latestReportTime": (round(dt.timestamp(dt.now())/600)*600)*1000}), __qos__)

View File

@@ -1,5 +1,5 @@
# Enter your python code.
import json, os
import json, os, time
from datetime import datetime as dt
from common.Logger import logger
from quickfaas.remotebus import publish
@@ -117,6 +117,15 @@ def checkParameterConfig(cfg):
return cfg
# Helper function to split the payload into chunks
def chunk_payload(payload, chunk_size=20):
chunked_values = list(payload["values"].items())
for i in range(0, len(chunked_values), chunk_size):
yield {
"ts": payload["ts"],
"values": dict(chunked_values[i:i+chunk_size])
}
def sendData(message):
#logger.debug(message)
try:
@@ -140,6 +149,8 @@ def sendData(message):
logger.error(e)
payload["values"]["oil_run_tank_level"] = run_tanks["oil_0" + str(run_tanks["oil_run_tank"]) + "_level"]
payload["values"]["water_run_tank_level"] = run_tanks["water_0" + str(run_tanks["water_run_tank"]) + "_level"]
publish(__topic__, json.dumps(payload), __qos__)
for chunk in chunk_payload(payload=payload):
publish(__topic__, json.dumps(chunk), __qos__)
time.sleep(2)
publish("v1/devices/me/attributes", json.dumps({"latestReportTime": (round(dt.timestamp(dt.now())/600)*600)*1000}), __qos__)

View File

@@ -1,11 +1,21 @@
import json, time
from datetime import datetime as dt
from quickfaas.measure import recall, write
from quickfaas.remotebus import publish
from common.Logger import logger
# Helper function to split the payload into chunks
def chunk_payload(payload, chunk_size=20):
chunked_values = list(payload["values"].items())
for i in range(0, len(chunked_values), chunk_size):
yield {
"ts": payload["ts"],
"values": dict(chunked_values[i:i+chunk_size])
}
def sync():
#get new values and send
payload = {}
payload = {"ts": round(dt.timestamp(dt.now()))*1000, "values": {}}
topic = "v1/devices/me/telemetry"
try:
data = recall()#json.loads(recall().decode("utf-8"))
@@ -15,10 +25,14 @@ def sync():
for controller in data:
for measure in controller["measures"]:
#publish measure
payload[measure["name"]] = measure["value"]
payload["values"][measure["name"]] = measure["value"]
logger.debug("Sending on topic: {}".format(topic))
logger.debug("Sending value: {}".format(payload))
publish(topic, json.dumps(payload), 1)
for chunk in chunk_payload(payload=payload):
publish(topic, json.dumps(chunk), 1)
time.sleep(2)
def writeplctag(value):
#value in the form {"measurement": <measurement_name>, "value": <value to write>}
try:

View File

@@ -1,5 +1,5 @@
# Enter your python code.
import json, os
import json, os, time
from datetime import datetime as dt
from common.Logger import logger
from quickfaas.remotebus import publish
@@ -117,6 +117,15 @@ def checkParameterConfig(cfg):
return cfg
# Helper function to split the payload into chunks
def chunk_payload(payload, chunk_size=20):
chunked_values = list(payload["values"].items())
for i in range(0, len(chunked_values), chunk_size):
yield {
"ts": payload["ts"],
"values": dict(chunked_values[i:i+chunk_size])
}
def sendData(message):
#logger.debug(message)
try:
@@ -140,6 +149,10 @@ def sendData(message):
logger.error(e)
payload["values"]["oil_run_tank_level"] = run_tanks["oil_0" + str(run_tanks["oil_run_tank"]) + "_level"]
payload["values"]["water_run_tank_level"] = run_tanks["water_0" + str(run_tanks["water_run_tank"]) + "_level"]
publish(__topic__, json.dumps(payload), __qos__)
for chunk in chunk_payload(payload=payload):
publish(__topic__, json.dumps(chunk), __qos__)
time.sleep(2)
publish("v1/devices/me/attributes", json.dumps({"latestReportTime": (round(dt.timestamp(dt.now())/600)*600)*1000}), __qos__)

View File

@@ -1,11 +1,21 @@
import json, time
from datetime import datetime as dt
from quickfaas.measure import recall, write
from quickfaas.remotebus import publish
from common.Logger import logger
# Helper function to split the payload into chunks
def chunk_payload(payload, chunk_size=20):
chunked_values = list(payload["values"].items())
for i in range(0, len(chunked_values), chunk_size):
yield {
"ts": payload["ts"],
"values": dict(chunked_values[i:i+chunk_size])
}
def sync():
#get new values and send
payload = {}
payload = {"ts": round(dt.timestamp(dt.now()))*1000, "values": {}}
topic = "v1/devices/me/telemetry"
try:
data = recall()#json.loads(recall().decode("utf-8"))
@@ -15,10 +25,13 @@ def sync():
for controller in data:
for measure in controller["measures"]:
#publish measure
payload[measure["name"]] = measure["value"]
payload["values"][measure["name"]] = measure["value"]
logger.debug("Sending on topic: {}".format(topic))
logger.debug("Sending value: {}".format(payload))
publish(topic, json.dumps(payload), 1)
for chunk in chunk_payload(payload=payload):
publish(topic, json.dumps(chunk), 1)
time.sleep(2)
def writeplctag(value):
#value in the form {"measurement": <measurement_name>, "value": <value to write>}
try:

View File

@@ -1,4 +1,4 @@
import json, os
import json, os, time
from datetime import datetime as dt
from datetime import timedelta as td
from common.Logger import logger
@@ -117,8 +117,16 @@ def checkParameterConfig(cfg):
return cfg
# Helper function to split the payload into chunks
def chunk_payload(payload, chunk_size=20):
chunked_values = list(payload["values"].items())
for i in range(0, len(chunked_values), chunk_size):
yield {
"ts": payload["ts"],
"values": dict(chunked_values[i:i+chunk_size])
}
def sendData(message, wizard_api):
def sendData(message):
payload = {"ts": (round(dt.timestamp(dt.now())/600)*600)*1000, "values": {}}
resetPayload = {"ts": "", "values": {}}
try:
@@ -136,7 +144,9 @@ def sendData(message, wizard_api):
payload["values"][measure] = message["values"]["hrvalvecontroller"][measure]["raw_data"]
except Exception as e:
logger.error(e)
publish(__topic__, json.dumps(payload), __qos__, cloud_name="default")
for chunk in chunk_payload(payload=payload):
publish(__topic__, json.dumps(chunk), __qos__)
time.sleep(2)
publish("v1/devices/me/attributes", json.dumps({"latestReportTime": (round(dt.timestamp(dt.now())/600)*600)*1000}), __qos__, cloud_name="default")
if dayReset:

View File

@@ -1,4 +1,4 @@
import json, os
import json, os, time
from common.Logger import logger
from quickfaas.remotebus import publish
from quickfaas.global_dict import get as get_params
@@ -143,6 +143,15 @@ def checkParameterConfig(cfg):
os.system(f'rm {paramspath}')
return cfg
# Helper function to split the payload into chunks
def chunk_payload(payload, chunk_size=20):
chunked_values = list(payload["values"].items())
for i in range(0, len(chunked_values), chunk_size):
yield {
"ts": payload["ts"],
"values": dict(chunked_values[i:i+chunk_size])
}
def sendData(message):
payload = {}
payload["ts"] = (round(dt.timestamp(dt.now())/600)*600)*1000
@@ -157,5 +166,8 @@ def sendData(message):
payload["values"][measure["name"]] = measure["value"]
except Exception as e:
logger.error(e)
publish(__topic__, json.dumps(payload), __qos__)
for chunk in chunk_payload(payload=payload):
publish(__topic__, json.dumps(chunk), __qos__)
time.sleep(2)
publish("v1/devices/me/attributes", json.dumps({"latestReportTime": (round(dt.timestamp(dt.now())/600)*600)*1000}), __qos__)

View File

@@ -1,11 +1,21 @@
import json, time
from datetime import datetime as dt
from quickfaas.measure import recall, write
from quickfaas.remotebus import publish
from common.Logger import logger
# Helper function to split the payload into chunks
def chunk_payload(payload, chunk_size=20):
chunked_values = list(payload["values"].items())
for i in range(0, len(chunked_values), chunk_size):
yield {
"ts": payload["ts"],
"values": dict(chunked_values[i:i+chunk_size])
}
def sync():
#get new values and send
payload = {}
payload = {"ts": round(dt.timestamp(dt.now()))*1000, "values": {}}
topic = "v1/devices/me/telemetry"
try:
data = recall()#json.loads(recall().decode("utf-8"))
@@ -15,10 +25,12 @@ def sync():
for controller in data:
for measure in controller["measures"]:
#publish measure
payload[measure["name"]] = measure["value"]
payload["values"][measure["name"]] = measure["value"]
logger.debug("Sending on topic: {}".format(topic))
logger.debug("Sending value: {}".format(payload))
publish(topic, json.dumps(payload), 1)
for chunk in chunk_payload(payload=payload):
publish(topic, json.dumps(chunk), 1)
time.sleep(2)
def writeplctag(value):
#value in the form {"measurement": <measurement_name>, "value": <value to write>}
try:

View File

@@ -0,0 +1,12 @@
# Enter your python code.
import json, time
from common.Logger import logger
from quickfaas.remotebus import publish
def sendAlarm(message):
logger.info(message)
payload = {}
payload["ts"] = time.time()*1000
payload["values"] = {message["measureName"]: message["value"]}
publish(__topic__, json.dumps(payload), __qos__)

View File

@@ -0,0 +1,179 @@
# Enter your python code.
import json, os, time
from datetime import datetime as dt
from common.Logger import logger
from mobiuspi_lib.gps import GPS
from quickfaas.remotebus import publish
from quickfaas.global_dict import get as get_params
from quickfaas.global_dict import _set_global_args
def reboot():
#basic = Basic()
logger.info("!" * 10 + "REBOOTING DEVICE" + "!"*10)
r = os.popen("kill -s SIGHUP `cat /var/run/python/supervisord.pid`").read()
logger.info(f"REBOOT : {r}")
def checkFileExist(filename):
path = "/var/user/files"
if not os.path.exists(path):
logger.info("no folder making files folder in var/user")
os.makedirs(path)
with open(path + "/" + filename, "a") as f:
json.dump({}, f)
if not os.path.exists(path + "/" + filename):
logger.info("no creds file making creds file")
with open(path + "/" + filename, "a") as f:
json.dump({}, f)
def convertDStoJSON(ds):
j = dict()
for x in ds:
j[x["key"]] = x["value"]
return j
def convertJSONtoDS(j):
d = []
for key in j.keys():
d.append({"key": key, "value": j[key]})
return d
def checkCredentialConfig():
logger.info("CHECKING CONFIG")
cfgpath = "/var/user/cfg/device_supervisor/device_supervisor.cfg"
credspath = "/var/user/files/creds.json"
cfg = dict()
with open(cfgpath, "r") as f:
cfg = json.load(f)
clouds = cfg.get("clouds")
logger.info(clouds)
#if not configured then try to configure from stored values
if clouds[0]["args"]["clientId"] == "unknown" or clouds[0]["args"]["username"] == "unknown" or not clouds[0]["args"]["passwd"] or clouds[0]["args"]["passwd"] == "unknown":
checkFileExist("creds.json")
with open(credspath, "r") as c:
creds = json.load(c)
if creds:
logger.info("updating config with stored data")
clouds[0]["args"]["clientId"] = creds["clientId"]
clouds[0]["args"]["username"] = creds["userName"]
clouds[0]["args"]["passwd"] = creds["password"]
cfg["clouds"] = clouds
cfg = checkParameterConfig(cfg)
with open(cfgpath, "w", encoding='utf-8') as n:
json.dump(cfg, n, indent=1, ensure_ascii=False)
reboot()
else:
#assuming clouds is filled out: if the data differs, assume someone entered something new and store it; if creds is empty, fill it from clouds' data
checkFileExist("creds.json")
with open(credspath, "r") as c:
logger.info("updating stored file with new data")
cfg = checkParameterConfig(cfg)
with open(cfgpath, "w", encoding='utf-8') as n:
json.dump(cfg, n, indent=1, ensure_ascii=False)
creds = json.load(c)
if creds:
if creds["clientId"] != clouds[0]["args"]["clientId"]:
creds["clientId"] = clouds[0]["args"]["clientId"]
if creds["userName"] != clouds[0]["args"]["username"]:
creds["userName"] = clouds[0]["args"]["username"]
if creds["password"] != clouds[0]["args"]["passwd"]:
creds["password"] = clouds[0]["args"]["passwd"]
else:
creds["clientId"] = clouds[0]["args"]["clientId"]
creds["userName"] = clouds[0]["args"]["username"]
creds["password"] = clouds[0]["args"]["passwd"]
with open(credspath, "w") as cw:
json.dump(creds,cw)
def checkParameterConfig(cfg):
logger.info("Checking Parameters!!!!")
paramspath = "/var/user/files/params.json"
cfgparams = convertDStoJSON(cfg.get("labels"))
#check stored values
checkFileExist("params.json")
with open(paramspath, "r") as f:
logger.info("Opened param storage file")
params = json.load(f)
if params:
if cfgparams != params:
#go through each param
#if not "unknown" and cfg and params aren't the same take from cfg likely updated manually
#if key in cfg but not in params copy to params
logger.info("equalizing params between cfg and stored")
for key in cfgparams.keys():
try:
if cfgparams[key] != params[key] and cfgparams[key] != "unknown":
params[key] = cfgparams[key]
except:
params[key] = cfgparams[key]
cfg["labels"] = convertJSONtoDS(params)
_set_global_args(convertJSONtoDS(params))
with open(paramspath, "w") as p:
json.dump(params, p)
else:
with open(paramspath, "w") as p:
logger.info("initializing param file with params in memory")
json.dump(convertDStoJSON(get_params()), p)
cfg["labels"] = get_params()
return cfg
# Helper function to split the payload into chunks
def chunk_payload(payload, chunk_size=20):
chunked_values = list(payload["values"].items())
for i in range(0, len(chunked_values), chunk_size):
yield {
"ts": payload["ts"],
"values": dict(chunked_values[i:i+chunk_size])
}
def getGPS():
# Create a gps instance
gps = GPS()
# Retrieve GPS information
position_status = gps.get_position_status()
logger.debug("position_status: ")
logger.debug(position_status)
latitude = position_status["latitude"].split(" ")
longitude = position_status["longitude"].split(" ")
lat_dec = int(latitude[0][:-1]) + (float(latitude[1][:-1])/60)
lon_dec = int(longitude[0][:-1]) + (float(longitude[1][:-1])/60)
if latitude[2] == "S":
lat_dec = lat_dec * -1
if longitude[2] == "W":
lon_dec = lon_dec * -1
#lat_dec = round(lat_dec, 7)
#lon_dec = round(lon_dec, 7)
logger.info("HERE IS THE GPS COORDS")
logger.info(f"LATITUDE: {lat_dec}, LONGITUDE: {lon_dec}")
speedKnots = position_status["speed"].split(" ")
speedMPH = float(speedKnots[0]) * 1.151
publish(__topic__, json.dumps({"ts": (round(dt.timestamp(dt.now())/600)*600)*1000, "values":{"latitude":f"{lat_dec:.8f}", "longitude":f"{lon_dec:.8f}", "speed": f"{speedMPH:.2f}"}}), __qos__)
publish("v1/devices/me/attributes", json.dumps({"latitude":f"{lat_dec:.8f}", "longitude":f"{lon_dec:.8f}", "speed": f"{speedMPH:.2f}"}), __qos__)
def sendData(message):
payload = {}
payload["ts"] = (round(dt.timestamp(dt.now())/600)*600)*1000
payload["values"] = {}
try:
checkCredentialConfig()
except Exception as e:
logger.error(e)
for measure in message["measures"]:
try:
logger.debug(measure)
payload["values"][measure["name"]] = measure["value"]
except Exception as e:
logger.error(e)
try:
getGPS()
except Exception as e:
logger.error(e)
for chunk in chunk_payload(payload=payload):
publish(__topic__, json.dumps(chunk), __qos__)
time.sleep(2)
publish("v1/devices/me/attributes", json.dumps({"latestReportTime": (round(dt.timestamp(dt.now())/600)*600)*1000}), __qos__)

View File

@@ -0,0 +1,35 @@
# Enter your python code.
from common.Logger import logger
import json, time
from quickfaas.measure import recall, write
from quickfaas.remotebus import publish
def writeplctag(value):
#value in the form {"measurement": <measurement_name>, "value": <value to write>}
try:
#value = json.loads(value.replace("'",'"'))
logger.debug(value)
#payload format: [{"name": "advvfdipp", "measures": [{"name": "manualfrequencysetpoint", "value": 49}]}]
message = [{"name": "tankalarms", "measures":[{"name":value["measurement"], "value": value["value"]}]}]
resp = write(message)
logger.debug("RETURN FROM WRITE: {}".format(resp))
return True
except Exception as e:
logger.debug(e)
return False
def receiveAttributes(topic, payload):
logger.debug(topic)
logger.debug(json.loads(payload))
#payload format: {'tankalarms.water_hihi_spt': 12}
payload = json.loads(payload)
measures = list(payload.keys())
logger.debug(measures)
if "tankalarms." in measures[0]:
measure = measures[0].split(".")[1]
else:
measure = measures[0]
logger.debug(measure)
writeplctag({"measurement": measure, "value": payload[measures[0]]})

View File

@@ -31,20 +31,22 @@ def sync():
for chunk in chunk_payload(payload=payload):
publish(topic, json.dumps(chunk), 1)
time.sleep(2)
def writeplctag(value, wizard_api):
def writeplctag(value):
#value in the form {"measurement": <measurement_name>, "value": <value to write>}
try:
#value = json.loads(value.replace("'",'"'))
logger.debug(value)
message = {"plcpond":{value["measurement"]: value["value"]}}
resp = wizard_api.write_plc_values(message)
#logger.debug("RETURN FROM WRITE: {}".format(resp))
#payload format: [{"name": "tankalarms", "measures": [{"name": "manualfrequencysetpoint", "value": 49}]}]
message = [{"name": "tankalarms", "measures":[{"name":value["measurement"], "value": value["value"]}]}]
resp = write(message)
logger.debug("RETURN FROM WRITE: {}".format(resp))
return True
except Exception as e:
logger.debug(e)
return False
def receiveCommand(topic, payload, wizard_api):
def receiveCommand(topic, payload):
try:
logger.debug(topic)
logger.debug(json.loads(payload))
@@ -55,19 +57,24 @@ def receiveCommand(topic, payload, wizard_api):
"writeplctag": writeplctag,
}
if command == "setPLCTag":
result = commands["writeplctag"](p["params"],wizard_api)
if result:
sync()
#commands[command](p["mac"].lower(),p["payload"]["value"], wizard_api)
try:
result = commands["writeplctag"](p["params"])
logger.debug(result)
except Exception as e:
logger.error(e)
#logger.debug(command)
ack(topic.split("/")[-1], wizard_api)
ack(topic.split("/")[-1])
time.sleep(5)
sync()
except Exception as e:
logger.debug(e)
def ack(msgid, wizard_api):
def ack(msgid):
#logger.debug(msgid)
#logger.debug(mac)
#logger.debug(name)
#logger.debug(value)
wizard_api.mqtt_publish("v1/devices/me/rpc/response/" + str(msgid), json.dumps({"msg": {"time": time.time()}, "metadata": "", "msgType": ""}))
publish("v1/devices/me/rpc/response/" + str(msgid), json.dumps({"msg": {"time": time.time()}, "metadata": "", "msgType": ""}), 1)

View File

@@ -646,14 +646,6 @@
{
"key": "MAC",
"value": "00:18:05:22:9c:ec"
},
{
"key": "test",
"value": "wooopwooop"
},
{
"key": "itME",
"value": "AAAHHHHH"
}
],
"modbusSlave": {

View File

@@ -1,5 +1,5 @@
# Enter your python code.
import json, os
import json, os, time
from datetime import datetime as dt
from common.Logger import logger
from quickfaas.remotebus import publish
@@ -116,6 +116,16 @@ def checkParameterConfig(cfg):
return cfg
# Helper function to split the payload into chunks
def chunk_payload(payload, chunk_size=20):
chunked_values = list(payload["values"].items())
for i in range(0, len(chunked_values), chunk_size):
yield {
"ts": payload["ts"],
"values": dict(chunked_values[i:i+chunk_size])
}
def sendData(message):
logger.debug(message)
try:
@@ -144,7 +154,11 @@ def sendData(message):
payload["values"]["valve_status"] = 0
else:
payload["values"]["valve_status"] = -1
publish(__topic__, json.dumps(payload), __qos__)
for chunk in chunk_payload(payload=payload):
publish(__topic__, json.dumps(chunk), __qos__)
time.sleep(2)
publish("v1/devices/me/attributes", json.dumps({"latestReportTime": (round(dt.timestamp(dt.now())/600)*600)*1000}), __qos__)
except Exception as e:
logger.error(e)

View File

@@ -1,19 +1,28 @@
import json, time
from datetime import datetime as dt
from quickfaas.measure import recall, write
from quickfaas.remotebus import publish
from common.Logger import logger
# Helper function to split the payload into chunks
def chunk_payload(payload, chunk_size=20):
chunked_values = list(payload["values"].items())
for i in range(0, len(chunked_values), chunk_size):
yield {
"ts": payload["ts"],
"values": dict(chunked_values[i:i+chunk_size])
}
def sync():
#get new values and send
payload = {}
payload = {"ts": round(dt.timestamp(dt.now()))*1000, "values": {}}
topic = "v1/devices/me/telemetry"
try:
data = recall()#json.loads(recall().decode("utf-8"))
except Exception as e:
logger.error(e)
logger.info(data)
logger.debug(data)
for controller in data:
payload = {"ts": int(time.time()*1000), "values": {}}
valve_open = 0
valve_close = 0
for measure in controller["measures"]:
@@ -36,7 +45,10 @@ def sync():
payload["values"]["valve_status"] = "Unknown"
logger.debug("Sending on topic: {}".format(topic))
logger.debug("Sending value: {}".format(payload))
publish(topic, json.dumps(payload))
for chunk in chunk_payload(payload=payload):
publish(topic, json.dumps(chunk), 1)
time.sleep(2)
def writeplctag(value):
try:
#value = json.loads(value.replace("'",'"'))

View File

@@ -53,8 +53,8 @@ def write_config(path, config, pubDir, subDir):
with os.scandir(subDir) as it:
funcNum = 0
for entry in it:
#print(entry)
#print(config["quickfaas"]["downloadFuncs"][ind])
print(entry)
print(config["quickfaas"]["downloadFuncs"][funcNum])
if not entry.name.startswith('.') and entry.is_file():
config["quickfaas"]["downloadFuncs"][funcNum]["script"] = code_to_string(entry.path)
funcNum += 1

View File

@@ -0,0 +1,495 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
" Corrected Total Volume Flow Rate Total\n",
"timestamp \n",
"2023-11-30 1578599 18619550\n"
]
}
],
"source": [
"import pandas as pd\n",
"\n",
"# Step 1: Load the data\n",
"total_volume_df = pd.read_csv('/Users/nico/Downloads/monthly_total.csv')\n",
"flow_rate_df = pd.read_csv('/Users/nico/Downloads/flow_rate.csv')\n",
"\n",
"# Step 2: Convert timestamps to datetime\n",
"total_volume_df['timestamp'] = pd.to_datetime(total_volume_df['timestamp'], format=\"%m/%d/%Y %I:%M:%S %p\")\n",
"flow_rate_df['timestamp'] = pd.to_datetime(flow_rate_df['timestamp'], format=\"%m/%d/%Y %I:%M:%S %p\")\n",
"\n",
"# Sort the data in ascending order by timestamp\n",
"total_volume_df = total_volume_df.sort_values(by='timestamp')\n",
"flow_rate_df = flow_rate_df.sort_values(by='timestamp')\n",
"\n",
"# Step 3: Identify resets and sum the totals for each month\n",
"total_volume_df['reset'] = total_volume_df['value'].lt(total_volume_df['value'].shift())\n",
"total_volume_df['cumulative'] = total_volume_df.groupby(total_volume_df['reset'].cumsum())['value'].cumsum()\n",
"monthly_total = total_volume_df.groupby(pd.Grouper(key='timestamp', freq='M'))['cumulative'].last()\n",
"\n",
"# Sort the flow rate data in ascending order by timestamp\n",
"flow_rate_df = flow_rate_df.sort_values(by='timestamp')\n",
"\n",
"# Recalculate the time differences and total volume\n",
"flow_rate_df['time_diff'] = flow_rate_df['timestamp'].diff().dt.total_seconds().div(60) # in minutes\n",
"flow_rate_df.iloc[0, flow_rate_df.columns.get_loc('time_diff')] = 0\n",
"flow_rate_df['total_volume'] = flow_rate_df['value'] * flow_rate_df['time_diff']\n",
"\n",
"# Recalculate the monthly totals for the flow rate data\n",
"monthly_flow_rate_total = flow_rate_df.groupby(pd.Grouper(key='timestamp', freq='M'))['total_volume'].sum()\n",
"\n",
"# Recalculate the corrected total volume for the total volume data\n",
"# The total volume data calculation remains the same as before\n",
"monthly_total = total_volume_df.groupby(pd.Grouper(key='timestamp', freq='M'))['cumulative'].last()\n",
"\n",
"# Compare the two monthly totals again\n",
"comparison_df = pd.DataFrame({'Corrected Total Volume': monthly_total, 'Flow Rate Total': monthly_flow_rate_total})\n",
"comparison_df.reset_index() # Resetting index for better visualization\n",
"comparison_df.head()\n",
"pd.options.display.float_format = '{:.0f}'.format\n",
"print(comparison_df)\n"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
" timestamp value reset cumulative\n",
"393 2023-11-01 08:33:50 25900882 False 25900882\n",
"392 2023-11-01 08:33:55 0 True 0\n",
"391 2023-11-01 08:57:09 21068 False 21068\n",
"390 2023-11-01 09:21:12 42084 False 63152\n",
"389 2023-11-01 09:45:18 63158 False 126310\n",
"388 2023-11-01 10:09:20 84192 False 210501\n",
"387 2023-11-01 10:33:23 105197 False 315698\n",
"386 2023-11-01 10:57:31 126286 False 441984\n",
"385 2023-11-01 11:21:38 147360 False 589344\n",
"384 2023-11-01 11:45:41 168368 False 757712\n",
"383 2023-11-01 12:09:47 189430 False 947142\n",
"382 2023-11-01 12:33:55 210496 False 1157638\n",
"381 2023-11-01 12:58:03 231583 False 1389221\n",
"380 2023-11-01 13:22:11 252675 False 1641896\n",
"379 2023-11-01 13:46:18 273709 False 1915604\n",
"378 2023-11-01 14:10:25 294772 False 2210376\n",
"377 2023-11-01 14:34:32 315819 False 2526195\n",
"376 2023-11-01 14:58:41 336882 False 2863077\n",
"375 2023-11-01 15:22:45 357895 False 3220972\n",
"374 2023-11-01 15:46:53 378974 False 3599946\n",
"373 2023-11-01 16:11:00 400008 False 3999954\n",
"372 2023-11-01 16:35:10 421087 False 4421041\n",
"371 2023-11-01 16:59:18 442139 False 4863180\n",
"370 2023-11-01 17:23:26 463200 False 5326380\n",
"369 2023-11-01 17:47:36 484273 False 5810653\n",
"368 2023-11-01 18:11:44 505336 False 6315989\n",
"367 2023-11-01 18:35:51 526376 False 6842364\n",
"366 2023-11-01 18:59:58 547407 False 7389771\n",
"365 2023-11-01 19:24:08 568480 False 7958252\n",
"364 2023-11-01 19:48:15 589509 False 8547761\n",
"363 2023-11-01 20:12:25 610599 False 9158360\n",
"362 2023-11-01 20:36:31 631633 False 9789993\n",
"361 2023-11-01 21:00:37 652667 False 10442660\n",
"360 2023-11-01 21:24:45 673727 False 11116387\n",
"359 2023-11-02 08:35:08 688950 False 11805336\n",
"358 2023-11-02 08:58:24 709958 False 12515294\n",
"357 2023-11-02 09:22:33 731005 False 13246299\n",
"356 2023-11-02 09:46:39 752007 False 13998306\n",
"355 2023-11-02 10:10:46 773036 False 14771342\n",
"354 2023-11-02 10:34:53 794044 False 15565386\n",
"353 2023-11-02 10:59:03 815091 False 16380478\n",
"352 2023-11-02 11:23:11 836123 False 17216601\n",
"351 2023-11-02 11:47:19 857128 False 18073729\n",
"350 2023-11-02 12:11:28 878175 False 18951904\n",
"349 2023-11-02 12:35:42 899278 False 19851182\n",
"348 2023-11-02 12:59:54 920362 False 20771544\n",
"347 2023-11-02 13:24:09 941446 False 21712989\n",
"346 2023-11-02 13:48:17 962456 False 22675446\n",
"345 2023-11-02 14:12:26 983488 False 23658933\n",
"344 2023-11-02 14:36:40 1004556 False 24663489\n",
"343 2023-11-02 15:00:54 1025640 False 25689129\n",
"342 2023-11-02 15:25:07 1046729 False 26735859\n",
"341 2023-11-02 15:49:18 1067756 False 27803614\n",
"340 2023-11-02 16:13:33 1088845 False 28892459\n",
"339 2023-11-02 16:37:47 1109913 False 30002372\n",
"338 2023-11-02 17:02:01 1130997 False 31133369\n",
"337 2023-11-02 17:26:14 1152055 False 32285424\n",
"336 2023-11-02 17:50:29 1173134 False 33458557\n",
"335 2023-11-02 18:14:45 1194218 False 34652775\n",
"334 2023-11-02 18:38:57 1215273 False 35868047\n",
"333 2023-11-02 19:04:01 1236344 False 37104391\n",
"332 2023-11-02 19:27:26 1257414 False 38361805\n",
"331 2023-11-02 19:51:38 1278483 False 39640288\n",
"330 2023-11-02 20:15:49 1299498 False 40939786\n",
"329 2023-11-02 20:40:01 1320554 False 42260340\n",
"328 2023-11-02 21:04:15 1341648 False 43601988\n",
"327 2023-11-02 21:28:30 1362737 False 44964725\n",
"326 2023-11-02 21:52:45 1383824 False 46348549\n",
"325 2023-11-03 08:36:09 1403976 False 47752525\n",
"324 2023-11-03 08:59:29 1425000 False 49177524\n",
"323 2023-11-03 09:23:42 1446042 False 50623566\n",
"322 2023-11-03 09:47:56 1467092 False 52090658\n",
"321 2023-11-03 10:12:08 1488110 False 53578767\n",
"320 2023-11-03 10:36:21 1509147 False 55087914\n",
"319 2023-11-03 11:00:36 1530228 False 56618142\n",
"318 2023-11-03 11:24:51 1551286 False 58169428\n",
"317 2023-11-03 11:49:05 1572325 False 59741753\n",
"316 2023-11-03 12:13:17 1593359 False 61335112\n",
"315 2023-11-03 12:37:36 1614451 False 62949563\n",
"314 2023-11-03 13:01:49 1635477 False 64585041\n",
"313 2023-11-03 13:26:04 1656519 False 66241560\n",
"312 2023-11-03 13:50:17 1677548 False 67919108\n",
"311 2023-11-03 14:14:32 1698585 False 69617693\n",
"310 2023-11-03 14:38:47 1719606 False 71337299\n",
"309 2023-11-03 15:03:01 1740627 False 73077926\n",
"308 2023-11-03 15:27:17 1761672 False 74839598\n",
"307 2023-11-03 16:16:18 1782677 False 76622275\n",
"306 2023-11-03 17:16:20 1783123 False 78405398\n",
"305 2023-11-03 18:16:21 1783123 False 80188521\n",
"304 2023-11-03 19:16:27 1795361 False 81983882\n",
"303 2023-11-03 19:39:02 1816450 False 83800332\n",
"302 2023-11-03 20:01:37 1837479 False 85637811\n",
"301 2023-11-03 20:24:08 1858524 False 87496335\n",
"300 2023-11-03 20:46:42 1879594 False 89375929\n",
"299 2023-11-03 21:09:15 1900660 False 91276589\n",
"298 2023-11-03 21:31:50 1921736 False 93198326\n",
"297 2023-11-03 21:54:24 1942799 False 95141125\n",
"296 2023-11-03 22:16:58 1963833 False 97104958\n",
"295 2023-11-04 08:38:15 1981665 False 99086623\n",
"294 2023-11-04 08:59:57 2002712 False 101089335\n",
"293 2023-11-04 09:22:35 2023799 False 103113134\n",
"292 2023-11-04 09:45:11 2044859 False 105157994\n",
"291 2023-11-04 10:08:45 2065967 False 107223960\n",
"290 2023-11-04 10:32:43 2087012 False 109310972\n",
"289 2023-11-04 10:56:45 2108082 False 111419054\n",
"288 2023-11-04 11:20:45 2129143 False 113548197\n",
"287 2023-11-04 11:44:45 2150216 False 115698413\n",
"286 2023-11-04 12:08:43 2171256 False 117869669\n",
"285 2023-11-04 12:32:46 2192350 False 120062019\n",
"284 2023-11-04 12:56:44 2213397 False 122275416\n",
"283 2023-11-04 13:20:41 2234400 False 124509816\n",
"282 2023-11-04 13:44:38 2255408 False 126765224\n",
"281 2023-11-04 14:08:38 2276458 False 129041682\n",
"280 2023-11-04 14:32:36 2297463 False 131339145\n",
"279 2023-11-04 14:56:34 2318484 False 133657629\n",
"278 2023-11-04 15:20:37 2339568 False 135997197\n",
"277 2023-11-04 15:44:37 2360597 False 138357794\n",
"276 2023-11-04 16:08:38 2381647 False 140739441\n",
"275 2023-11-04 16:32:39 2402676 False 143142116\n",
"274 2023-11-04 16:56:38 2423678 False 145565794\n",
"273 2023-11-04 17:20:41 2444770 False 148010565\n",
"272 2023-11-04 17:44:42 2465802 False 150476366\n",
"271 2023-11-04 18:10:15 2486849 False 152963215\n",
"270 2023-11-04 18:32:56 2507904 False 155471119\n",
"269 2023-11-04 18:56:42 2528912 False 158000031\n",
"268 2023-11-04 19:20:42 2549946 False 160549977\n",
"267 2023-11-04 19:44:41 2570991 False 163120968\n",
"266 2023-11-04 20:08:43 2592051 False 165713019\n",
"265 2023-11-04 20:32:43 2613082 False 168326101\n",
"264 2023-11-04 20:56:42 2634130 False 170960231\n",
"263 2023-11-04 21:20:45 2655214 False 173615445\n",
"262 2023-11-04 21:44:43 2676256 False 176291700\n",
"261 2023-11-04 22:08:45 2697321 False 178989022\n",
"260 2023-11-04 22:32:44 2718371 False 181707393\n",
"259 2023-11-05 07:53:34 2740757 False 184448150\n",
"258 2023-11-05 08:16:45 2761799 False 187209950\n",
"257 2023-11-05 08:40:47 2782846 False 189992796\n",
"256 2023-11-05 09:04:51 2803925 False 192796721\n",
"255 2023-11-05 09:28:50 2824938 False 195621660\n",
"254 2023-11-05 09:52:50 2845949 False 198467608\n",
"253 2023-11-05 10:16:56 2867038 False 201334647\n",
"252 2023-11-05 10:41:01 2888130 False 204222777\n",
"251 2023-11-05 11:05:09 2909222 False 207131998\n",
"250 2023-11-05 11:29:10 2930238 False 210062236\n",
"249 2023-11-05 11:53:15 2951308 False 213013545\n",
"248 2023-11-05 12:17:15 2972316 False 215985861\n",
"247 2023-11-05 12:41:21 2993387 False 218979248\n",
"246 2023-11-05 13:05:26 3014461 False 221993709\n",
"245 2023-11-05 13:29:32 3035547 False 225029256\n",
"244 2023-11-05 13:53:39 3056618 False 228085875\n",
"243 2023-11-05 14:17:47 3077702 False 231163577\n",
"242 2023-11-05 14:41:53 3098770 False 234262347\n",
"241 2023-11-05 15:06:00 3119831 False 237382178\n",
"240 2023-11-05 15:30:05 3140881 False 240523059\n",
"239 2023-11-05 15:54:12 3161928 False 243684987\n",
"238 2023-11-05 16:18:18 3182973 False 246867960\n",
"237 2023-11-05 16:42:22 3204002 False 250071961\n",
"236 2023-11-05 17:08:33 3225044 False 253297005\n",
"235 2023-11-05 17:30:33 3246083 False 256543088\n",
"234 2023-11-05 17:54:39 3267143 False 259810231\n",
"233 2023-11-05 18:18:45 3288212 False 263098442\n",
"232 2023-11-05 18:42:48 3309214 False 266407656\n",
"231 2023-11-05 19:07:19 3330293 False 269737949\n",
"230 2023-11-05 19:31:01 3351353 False 273089302\n",
"229 2023-11-05 19:55:03 3372356 False 276461658\n",
"228 2023-11-05 20:19:05 3393364 False 279855022\n",
"227 2023-11-05 20:43:08 3414372 False 283269394\n",
"226 2023-11-05 21:07:16 3435453 False 286704847\n",
"225 2023-11-06 07:45:46 3451576 False 290156422\n",
"224 2023-11-06 08:08:59 3472607 False 293629030\n",
"223 2023-11-06 08:33:06 3493660 False 297122690\n",
"222 2023-11-06 08:57:10 3514668 False 300637357\n",
"221 2023-11-06 09:21:18 3535723 False 304173080\n",
"220 2023-11-06 09:45:22 3556736 False 307729816\n",
"219 2023-11-06 10:09:28 3577778 False 311307594\n",
"218 2023-11-06 10:33:35 3598822 False 314906416\n",
"217 2023-11-06 11:33:37 3618352 False 318524769\n",
"216 2023-11-06 12:29:15 3639384 False 322164153\n",
"215 2023-11-06 12:54:07 3660421 False 325824573\n",
"214 2023-11-06 13:18:58 3681431 False 329506005\n",
"213 2023-11-06 13:43:49 3702455 False 333208460\n",
"212 2023-11-06 14:08:46 3723544 False 336932004\n",
"211 2023-11-06 14:33:39 3744584 False 340676587\n",
"210 2023-11-06 14:58:33 3765612 False 344442200\n",
"209 2023-11-06 15:23:24 3786618 False 348228817\n",
"208 2023-11-06 15:48:20 3807699 False 352036516\n",
"207 2023-11-06 16:13:18 3828791 False 355865307\n",
"206 2023-11-06 16:38:10 3849807 False 359715114\n",
"205 2023-11-06 17:04:52 3870817 False 363585931\n",
"204 2023-11-06 17:27:54 3891862 False 367477792\n",
"203 2023-11-06 17:52:53 3912954 False 371390746\n",
"202 2023-11-06 18:17:46 3933988 False 375324734\n",
"201 2023-11-06 18:42:39 3955001 False 379279735\n",
"200 2023-11-06 19:07:30 3976035 False 383255770\n",
"199 2023-11-06 19:32:24 3997090 False 387252860\n",
"198 2023-11-06 19:57:13 4018103 False 391270963\n",
"197 2023-11-06 20:22:05 4039135 False 395310098\n",
"196 2023-11-06 20:46:57 4060169 False 399370267\n",
"195 2023-11-06 21:11:48 4081185 False 403451451\n",
"194 2023-11-06 21:36:41 4102211 False 407553662\n",
"193 2023-11-07 07:41:52 4122436 False 411676099\n",
"192 2023-11-07 08:05:55 4143442 False 415819540\n",
"191 2023-11-07 08:30:48 4164471 False 419984011\n",
"190 2023-11-07 08:55:44 4185528 False 424169540\n",
"189 2023-11-07 09:20:39 4206584 False 428376123\n",
"188 2023-11-07 09:45:32 4227610 False 432603733\n",
"187 2023-11-07 10:10:29 4248673 False 436852406\n",
"186 2023-11-07 10:35:29 4269744 False 441122149\n",
"185 2023-11-07 11:00:09 4290749 False 445412898\n",
"184 2023-11-07 11:24:17 4311828 False 449724726\n",
"183 2023-11-07 11:48:25 4332862 False 454057587\n",
"182 2023-11-07 12:12:33 4353920 False 458411507\n",
"181 2023-11-07 12:36:43 4375004 False 462786510\n",
"180 2023-11-07 13:00:48 4396006 False 467182516\n",
"179 2023-11-07 13:24:58 4417088 False 471599604\n",
"178 2023-11-07 13:49:09 4438161 False 476037765\n",
"177 2023-11-07 14:13:17 4459206 False 480496971\n",
"176 2023-11-07 14:37:26 4480269 False 484977239\n",
"175 2023-11-07 15:08:37 4501279 False 489478518\n",
"174 2023-11-07 15:25:39 4522300 False 494000818\n",
"173 2023-11-07 15:49:47 4543334 False 498544153\n",
"172 2023-11-07 16:13:55 4564376 False 503108529\n",
"171 2023-11-07 16:38:06 4585439 False 507693968\n",
"170 2023-11-07 17:03:58 4606497 False 512300465\n",
"169 2023-11-07 17:26:25 4627547 False 516928012\n",
"168 2023-11-07 17:50:32 4648568 False 521576580\n",
"167 2023-11-07 18:14:40 4669581 False 526246161\n",
"166 2023-11-07 18:38:52 4690676 False 530936836\n",
"165 2023-11-07 19:03:03 4711723 False 535648559\n",
"164 2023-11-07 19:27:12 4732767 False 540381327\n",
"163 2023-11-07 19:51:24 4753851 False 545135178\n",
"162 2023-11-07 20:15:33 4774901 False 549910079\n",
"161 2023-11-07 20:39:45 4795975 False 554706054\n",
"160 2023-11-07 21:03:58 4817059 False 559523113\n",
"159 2023-11-07 21:28:08 4838116 False 564361229\n",
"158 2023-11-07 21:52:14 4859124 False 569220354\n",
"157 2023-11-08 07:43:32 4869212 False 574089566\n",
"156 2023-11-08 08:06:58 4890270 False 578979836\n",
"155 2023-11-08 08:31:12 4911362 False 583891198\n",
"154 2023-11-08 08:55:26 4932451 False 588823649\n",
"153 2023-11-08 09:19:39 4953535 False 593777184\n",
"152 2023-11-08 09:43:51 4974577 False 598751761\n",
"151 2023-11-08 10:08:05 4995656 False 603747417\n",
"150 2023-11-08 10:32:17 5016706 False 608764123\n",
"149 2023-11-08 10:56:28 5037756 False 613801878\n",
"148 2023-11-08 11:20:38 5058769 False 618860647\n",
"147 2023-11-08 11:44:52 5079855 False 623940502\n",
"146 2023-11-08 12:09:04 5100876 False 629041379\n",
"145 2023-11-08 12:33:17 5121945 False 634163324\n",
"144 2023-11-08 12:57:31 5143005 False 639306328\n",
"143 2023-11-08 13:21:42 5164029 False 644470357\n",
"142 2023-11-08 13:45:52 5185042 False 649655399\n",
"141 2023-11-08 14:10:09 5206128 False 654861527\n",
"140 2023-11-08 14:34:18 5227136 False 660088664\n",
"139 2023-11-08 14:58:36 5248233 False 665336897\n",
"138 2023-11-08 15:22:52 5269317 False 670606214\n",
"137 2023-11-08 15:47:08 5290401 False 675896616\n",
"136 2023-11-08 16:11:19 5311404 False 681208020\n",
"135 2023-11-08 16:35:34 5332483 False 686540502\n",
"134 2023-11-08 16:59:51 5353548 False 691894051\n",
"133 2023-11-08 17:24:06 5374606 False 697268657\n",
"132 2023-11-08 17:48:18 5395624 False 702664281\n",
"131 2023-11-08 18:12:30 5416651 False 708080932\n",
"130 2023-11-08 18:36:46 5437724 False 713518656\n",
"129 2023-11-08 19:00:59 5458782 False 718977438\n",
"128 2023-11-08 19:25:12 5479803 False 724457241\n",
"127 2023-11-08 19:49:23 5500829 False 729958071\n",
"126 2023-11-08 20:13:36 5521863 False 735479934\n",
"125 2023-11-08 20:37:48 5542900 False 741022834\n",
"124 2023-11-08 21:01:59 5563905 False 746586740\n",
"123 2023-11-08 21:26:11 5584932 False 752171671\n",
"122 2023-11-09 09:29:32 5602952 False 757774623\n",
"121 2023-11-09 10:11:53 5623971 False 763398594\n",
"120 2023-11-09 10:36:12 5645044 False 769043638\n",
"119 2023-11-09 11:00:30 5666128 False 774709766\n",
"118 2023-11-09 11:24:46 5687199 False 780396965\n",
"117 2023-11-09 11:49:02 5708217 False 786105183\n",
"116 2023-11-09 12:13:19 5729280 False 791834463\n",
"115 2023-11-09 12:37:37 5750351 False 797584814\n",
"114 2023-11-09 13:02:26 5771398 False 803356213\n",
"113 2023-11-09 13:26:10 5792446 False 809148658\n",
"112 2023-11-09 13:50:25 5813498 False 814962157\n",
"111 2023-11-09 14:14:38 5834517 False 820796673\n",
"110 2023-11-09 14:38:55 5855559 False 826652232\n",
"109 2023-11-09 15:03:10 5876582 False 832528814\n",
"108 2023-11-10 08:18:37 5894028 False 838422842\n",
"107 2023-11-10 09:01:57 5915075 False 844337918\n",
"106 2023-11-10 09:26:18 5936149 False 850274066\n",
"105 2023-11-10 09:50:39 5957225 False 856231291\n",
"104 2023-11-10 10:14:58 5978262 False 862209553\n",
"103 2023-11-10 10:39:20 5999327 False 868208880\n",
"102 2023-11-10 11:03:40 6020382 False 874229262\n",
"101 2023-11-10 11:28:00 6041438 False 880270700\n",
"100 2023-11-10 11:52:20 6062487 False 886333187\n",
"99 2023-11-10 12:16:39 6083527 False 892416714\n",
"98 2023-11-10 12:41:01 6104595 False 898521309\n",
"97 2023-11-10 13:05:19 6125613 False 904646922\n",
"96 2023-11-10 13:29:42 6146682 False 910793604\n",
"95 2023-11-10 13:54:03 6167732 False 916961336\n",
"94 2023-11-10 14:18:25 6188808 False 923150143\n",
"93 2023-11-10 14:42:46 6209855 False 929359998\n",
"92 2023-11-10 15:07:05 6230881 False 935590879\n",
"91 2023-11-10 15:31:25 6251915 False 941842794\n",
"90 2023-11-11 09:28:24 6273144 False 948115938\n",
"89 2023-11-11 10:34:27 6286082 False 954402020\n",
"88 2023-11-11 11:12:21 6307153 False 960709173\n",
"87 2023-11-11 12:06:28 6328179 False 967037353\n",
"86 2023-11-11 12:43:52 6349190 False 973386543\n",
"85 2023-11-11 13:23:40 6370203 False 979756746\n",
"84 2023-11-11 13:54:46 6391216 False 986147962\n",
"83 2023-11-11 14:42:56 0 True 0\n",
"82 2023-11-11 15:31:21 22121 False 22121\n",
"81 2023-11-12 09:07:40 36640 False 58761\n",
"80 2023-11-12 10:02:29 57648 False 116408\n",
"79 2023-11-12 10:44:53 78705 False 195114\n",
"78 2023-11-12 11:09:26 99789 False 294903\n",
"77 2023-11-12 11:33:57 120850 False 415753\n",
"76 2023-11-12 11:58:24 141863 False 557616\n",
"75 2023-11-12 12:22:52 162873 False 720489\n",
"74 2023-11-12 12:47:25 183963 False 904452\n",
"73 2023-11-12 13:11:53 204976 False 1109427\n",
"72 2023-11-12 13:36:26 226057 False 1335484\n",
"71 2023-11-12 14:00:56 247128 False 1582612\n",
"70 2023-11-12 14:49:07 269338 False 1851951\n",
"69 2023-11-12 15:12:43 290346 False 2142297\n",
"68 2023-11-13 09:21:59 12994 True 12994\n",
"67 2023-11-13 09:22:03 0 True 0\n",
"66 2023-11-13 10:05:27 21063 False 21063\n",
"65 2023-11-13 10:48:14 42152 False 63215\n",
"64 2023-11-13 11:12:50 63210 False 126425\n",
"63 2023-11-13 11:54:12 84273 False 210698\n",
"62 2023-11-13 13:07:49 101028 False 311727\n",
"61 2023-11-13 13:43:20 122097 False 433823\n",
"60 2023-11-13 14:20:14 143181 False 577004\n",
"59 2023-11-13 14:44:47 164228 False 741232\n",
"58 2023-11-13 15:09:25 185317 False 926549\n",
"57 2023-11-13 15:33:58 206338 False 1132887\n",
"56 2023-11-13 15:58:33 227380 False 1360267\n",
"55 2023-11-13 16:23:08 248435 False 1608702\n",
"54 2023-11-14 08:50:44 254113 False 1862816\n",
"53 2023-11-14 09:19:27 275139 False 2137955\n",
"52 2023-11-14 09:44:08 296205 False 2434160\n",
"51 2023-11-14 10:08:44 317213 False 2751373\n",
"50 2023-11-14 10:33:20 338242 False 3089614\n",
"49 2023-11-14 10:57:59 359289 False 3448904\n",
"48 2023-11-14 11:22:38 380342 False 3829245\n",
"47 2023-11-14 11:47:19 401428 False 4230673\n",
"46 2023-11-14 12:11:58 422475 False 4653148\n",
"45 2023-11-14 12:48:28 445694 False 5098842\n",
"44 2023-11-14 13:20:27 466738 False 5565580\n",
"43 2023-11-14 14:05:06 487780 False 6053360\n",
"42 2023-11-14 15:05:06 503370 False 6556730\n",
"41 2023-11-16 08:27:47 0 True 0\n",
"40 2023-11-16 08:28:08 21404 False 21404\n",
"39 2023-11-16 08:28:28 42407 False 63811\n",
"38 2023-11-16 08:28:51 63491 False 127302\n",
"37 2023-11-16 08:29:03 84667 False 211969\n",
"36 2023-11-16 08:29:20 106082 False 318050\n",
"35 2023-11-16 08:29:40 127116 False 445166\n",
"34 2023-11-16 08:29:55 148562 False 593728\n",
"33 2023-11-16 08:30:38 176458 False 770186\n",
"32 2023-11-16 08:30:55 197497 False 967683\n",
"31 2023-11-16 08:31:05 219006 False 1186689\n",
"30 2023-11-16 08:31:37 240363 False 1427052\n",
"29 2023-11-16 08:31:51 261846 False 1688899\n",
"28 2023-11-16 08:32:15 282849 False 1971748\n",
"27 2023-11-16 08:32:50 303883 False 2275631\n",
"26 2023-11-16 08:33:07 325188 False 2600818\n",
"25 2023-11-16 08:33:24 346610 False 2947429\n",
"24 2023-11-16 08:33:45 367639 False 3315068\n",
"23 2023-11-16 08:34:04 388972 False 3704040\n",
"22 2023-11-16 08:34:23 410217 False 4114257\n",
"21 2023-11-16 08:34:47 431227 False 4545484\n",
"20 2023-11-16 08:35:01 452282 False 4997766\n",
"19 2023-11-16 08:35:11 473647 False 5471414\n",
"18 2023-11-16 08:35:35 494773 False 5966187\n",
"17 2023-11-16 08:35:49 515849 False 6482036\n",
"16 2023-11-16 08:36:25 543559 False 7025595\n",
"15 2023-11-16 08:36:48 564588 False 7590182\n",
"14 2023-11-16 08:36:58 0 True 0\n",
"13 2023-11-16 08:51:10 21084 False 21084\n",
"12 2023-11-16 09:16:00 42147 False 63231\n",
"11 2023-11-16 09:40:51 63213 False 126444\n",
"10 2023-11-16 10:05:41 84268 False 210711\n",
"9 2023-11-16 10:30:31 105339 False 316050\n",
"8 2023-11-16 10:55:21 126407 False 442457\n",
"7 2023-11-16 11:55:26 142018 False 584475\n",
"6 2023-11-16 12:55:34 142018 False 726492\n",
"5 2023-11-16 13:55:37 142018 False 868510\n",
"4 2023-11-16 14:55:41 142018 False 1010528\n",
"3 2023-11-16 15:55:46 142018 False 1152546\n",
"2 2023-11-16 16:55:48 142018 False 1294563\n",
"1 2023-11-16 17:55:50 142018 False 1436581\n",
"0 2023-11-16 18:55:54 142018 False 1578599\n"
]
}
],
"source": [
"with pd.option_context('display.max_rows', None, 'display.max_columns', None): # more options can be specified also\n",
" print(total_volume_df)"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "tbDataCollector",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.10.5"
}
},
"nbformat": 4,
"nbformat_minor": 2
}
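The reset-detection trick in the first cell generalizes; a minimal sketch with illustrative meter readings:

# Minimal sketch, not part of the notebook: lt(shift()) flags each reset, and
# cumsum() over those flags numbers the runs between resets.
import pandas as pd

s = pd.Series([100, 250, 400, 0, 120, 300])   # totalizer resets to 0 after 400
reset = s.lt(s.shift())                       # True only at the row after the drop
groups = reset.cumsum()                       # [0, 0, 0, 1, 1, 1]
within = s.groupby(groups).cumsum()
print(within.tolist())                        # [100, 350, 750, 0, 120, 420]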

View File

@@ -0,0 +1,463 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": 5,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"group\n",
"1 126406.875\n",
"2 105338.625\n",
"3 84267.750\n",
"4 63212.625\n",
"5 42147.000\n",
"6 21084.000\n",
"7 0.000\n",
"8 543558.750\n",
"9 515849.250\n",
"10 494773.125\n",
"11 473647.125\n",
"12 452282.250\n",
"13 431227.125\n",
"14 410216.625\n",
"15 388972.500\n",
"16 367639.125\n",
"17 346610.250\n",
"18 325187.625\n",
"19 303883.125\n",
"20 282849.000\n",
"21 261846.375\n",
"22 240363.375\n",
"23 219006.375\n",
"24 197497.125\n",
"25 176457.750\n",
"26 148561.875\n",
"27 127115.625\n",
"28 106081.500\n",
"29 84666.750\n",
"30 63490.875\n",
"31 42406.875\n",
"32 21404.250\n",
"33 0.000\n",
"34 487780.125\n",
"35 466738.125\n",
"36 445693.500\n",
"37 422475.375\n",
"38 401428.125\n",
"39 380341.500\n",
"40 359289.000\n",
"41 338241.750\n",
"42 317212.875\n",
"43 296205.000\n",
"44 275139.375\n",
"45 254113.125\n",
"46 248435.250\n",
"47 227380.125\n",
"48 206338.125\n",
"49 185317.125\n",
"50 164227.875\n",
"51 143180.625\n",
"52 122096.625\n",
"53 101028.375\n",
"54 84273.000\n",
"55 63210.000\n",
"56 42152.250\n",
"57 21063.000\n",
"58 0.000\n",
"59 269338.125\n",
"60 247128.000\n",
"61 226057.125\n",
"62 204975.750\n",
"63 183962.625\n",
"64 162873.375\n",
"65 141862.875\n",
"66 120849.750\n",
"67 99789.375\n",
"68 78705.375\n",
"69 57647.625\n",
"70 36639.750\n",
"71 22120.875\n",
"72 0.000\n",
"73 6370203.000\n",
"74 6349189.875\n",
"75 6328179.375\n",
"76 6307153.125\n",
"77 6286082.250\n",
"78 6273143.625\n",
"79 6251915.250\n",
"80 6230881.125\n",
"81 6209854.875\n",
"82 6188807.625\n",
"83 6167731.500\n",
"84 6146681.625\n",
"85 6125613.375\n",
"86 6104595.000\n",
"87 6083526.750\n",
"88 6062487.375\n",
"89 6041437.500\n",
"90 6020382.375\n",
"91 5999327.250\n",
"92 5978261.625\n",
"93 5957224.875\n",
"94 5936148.750\n",
"95 5915075.250\n",
"96 5894028.000\n",
"97 5876582.250\n",
"98 5855558.625\n",
"99 5834516.625\n",
"100 5813498.250\n",
"101 5792445.750\n",
"102 5771398.500\n",
"103 5750351.250\n",
"104 5729280.375\n",
"105 5708217.375\n",
"106 5687199.000\n",
"107 5666128.125\n",
"108 5645044.125\n",
"109 5623970.625\n",
"110 5602952.250\n",
"111 5584931.625\n",
"112 5563905.375\n",
"113 5542900.125\n",
"114 5521863.375\n",
"115 5500829.250\n",
"116 5479803.000\n",
"117 5458782.000\n",
"118 5437724.250\n",
"119 5416650.750\n",
"120 5395624.500\n",
"121 5374606.125\n",
"122 5353548.375\n",
"123 5332482.750\n",
"124 5311404.000\n",
"125 5290401.375\n",
"126 5269317.375\n",
"127 5248233.375\n",
"128 5227136.250\n",
"129 5206128.375\n",
"130 5185041.750\n",
"131 5164028.625\n",
"132 5143005.000\n",
"133 5121944.625\n",
"134 5100876.375\n",
"135 5079855.375\n",
"136 5058768.750\n",
"137 5037755.625\n",
"138 5016705.750\n",
"139 4995655.875\n",
"140 4974577.125\n",
"141 4953535.125\n",
"142 4932451.125\n",
"143 4911361.875\n",
"144 4890270.000\n",
"145 4869212.250\n",
"146 4859124.375\n",
"147 4838116.500\n",
"148 4817058.750\n",
"149 4795974.750\n",
"150 4774901.250\n",
"151 4753851.375\n",
"152 4732767.375\n",
"153 4711722.750\n",
"154 4690675.500\n",
"155 4669581.000\n",
"156 4648567.875\n",
"157 4627546.875\n",
"158 4606497.000\n",
"159 4585439.250\n",
"160 4564376.250\n",
"161 4543334.250\n",
"162 4522300.125\n",
"163 4501279.125\n",
"164 4480268.625\n",
"165 4459205.625\n",
"166 4438161.000\n",
"167 4417087.500\n",
"168 4396006.125\n",
"169 4375003.500\n",
"170 4353919.500\n",
"171 4332861.750\n",
"172 4311827.625\n",
"173 4290748.875\n",
"174 4269743.625\n",
"175 4248672.750\n",
"176 4227609.750\n",
"177 4206583.500\n",
"178 4185528.375\n",
"179 4164470.625\n",
"180 4143441.750\n",
"181 4122436.500\n",
"182 4102210.875\n",
"183 4081184.625\n",
"184 4060168.875\n",
"185 4039134.750\n",
"186 4018103.250\n",
"187 3997090.125\n",
"188 3976035.000\n",
"189 3955000.875\n",
"190 3933987.750\n",
"191 3912953.625\n",
"192 3891861.750\n",
"193 3870817.125\n",
"194 3849806.625\n",
"195 3828790.875\n",
"196 3807699.000\n",
"197 3786617.625\n",
"198 3765612.375\n",
"199 3744583.500\n",
"200 3723544.125\n",
"201 3702454.875\n",
"202 3681431.250\n",
"203 3660420.750\n",
"204 3639384.000\n",
"205 3618352.500\n",
"206 3598822.500\n",
"207 3577777.875\n",
"208 3556735.875\n",
"209 3535722.750\n",
"210 3514667.625\n",
"211 3493659.750\n",
"212 3472607.250\n",
"213 3451575.750\n",
"214 3435453.000\n",
"215 3414371.625\n",
"216 3393363.750\n",
"217 3372355.875\n",
"218 3351353.250\n",
"219 3330292.875\n",
"220 3309214.125\n",
"221 3288211.500\n",
"222 3267143.250\n",
"223 3246082.875\n",
"224 3225043.500\n",
"225 3204001.500\n",
"226 3182972.625\n",
"227 3161928.000\n",
"228 3140880.750\n",
"229 3119830.875\n",
"230 3098770.500\n",
"231 3077702.250\n",
"232 3056618.250\n",
"233 3035547.375\n",
"234 3014460.750\n",
"235 2993387.250\n",
"236 2972316.375\n",
"237 2951308.500\n",
"238 2930237.625\n",
"239 2909221.875\n",
"240 2888130.000\n",
"241 2867038.125\n",
"242 2845948.875\n",
"243 2824938.375\n",
"244 2803925.250\n",
"245 2782846.500\n",
"246 2761799.250\n",
"247 2740757.250\n",
"248 2718371.250\n",
"249 2697321.375\n",
"250 2676255.750\n",
"251 2655213.750\n",
"252 2634129.750\n",
"253 2613082.500\n",
"254 2592051.000\n",
"255 2570990.625\n",
"256 2549946.000\n",
"257 2528911.875\n",
"258 2507904.000\n",
"259 2486848.875\n",
"260 2465801.625\n",
"261 2444770.125\n",
"262 2423678.250\n",
"263 2402675.625\n",
"264 2381646.750\n",
"265 2360596.875\n",
"266 2339568.000\n",
"267 2318484.000\n",
"268 2297463.000\n",
"269 2276457.750\n",
"270 2255407.875\n",
"271 2234400.000\n",
"272 2213397.375\n",
"273 2192350.125\n",
"274 2171255.625\n",
"275 2150216.250\n",
"276 2129142.750\n",
"277 2108082.375\n",
"278 2087011.500\n",
"279 2065966.875\n",
"280 2044859.250\n",
"281 2023798.875\n",
"282 2002712.250\n",
"283 1981665.000\n",
"284 1963833.375\n",
"285 1942799.250\n",
"286 1921736.250\n",
"287 1900660.125\n",
"288 1879594.500\n",
"289 1858523.625\n",
"290 1837479.000\n",
"291 1816450.125\n",
"292 1795360.875\n",
"293 1783123.125\n",
"294 1782676.875\n",
"295 1761671.625\n",
"296 1740627.000\n",
"297 1719606.000\n",
"298 1698585.000\n",
"299 1677548.250\n",
"300 1656519.375\n",
"301 1635477.375\n",
"302 1614451.125\n",
"303 1593359.250\n",
"304 1572325.125\n",
"305 1551285.750\n",
"306 1530228.000\n",
"307 1509146.625\n",
"308 1488109.875\n",
"309 1467091.500\n",
"310 1446041.625\n",
"311 1424999.625\n",
"312 1403976.000\n",
"313 1383823.875\n",
"314 1362737.250\n",
"315 1341648.000\n",
"316 1320553.500\n",
"317 1299498.375\n",
"318 1278482.625\n",
"319 1257414.375\n",
"320 1236343.500\n",
"321 1215272.625\n",
"322 1194217.500\n",
"323 1173133.500\n",
"324 1152054.750\n",
"325 1130997.000\n",
"326 1109913.000\n",
"327 1088844.750\n",
"328 1067755.500\n",
"329 1046729.250\n",
"330 1025640.000\n",
"331 1004556.000\n",
"332 983487.750\n",
"333 962456.250\n",
"334 941445.750\n",
"335 920361.750\n",
"336 899277.750\n",
"337 878175.375\n",
"338 857128.125\n",
"339 836122.875\n",
"340 815091.375\n",
"341 794044.125\n",
"342 773036.250\n",
"343 752007.375\n",
"344 731004.750\n",
"345 709957.500\n",
"346 688949.625\n",
"347 673727.250\n",
"348 652666.875\n",
"349 631632.750\n",
"350 610598.625\n",
"351 589509.375\n",
"352 568480.500\n",
"353 547407.000\n",
"354 526375.500\n",
"355 505336.125\n",
"356 484273.125\n",
"357 463199.625\n",
"358 442139.250\n",
"359 421086.750\n",
"360 400008.000\n",
"361 378973.875\n",
"362 357895.125\n",
"363 336882.000\n",
"364 315819.000\n",
"365 294771.750\n",
"366 273708.750\n",
"367 252674.625\n",
"368 231582.750\n",
"369 210496.125\n",
"370 189430.500\n",
"371 168367.500\n",
"372 147359.625\n",
"373 126286.125\n",
"374 105196.875\n",
"375 84191.625\n",
"376 63157.500\n",
"377 42084.000\n",
"378 21068.250\n",
"379 0.000\n",
"Name: value, dtype: float64\n"
]
}
],
"source": [
"import pandas as pd\n",
"\n",
"# Re-loading the CSV files as the code execution state was reset\n",
"total_volume_path = '/Users/nico/Downloads/monthly_total.csv'\n",
"flow_rate_path = '/Users/nico/Downloads/flow_rate.csv'\n",
"\n",
"total_volume_df = pd.read_csv(total_volume_path)\n",
"flow_rate_df = pd.read_csv(flow_rate_path)\n",
"\n",
"# Convert timestamps to datetime\n",
"total_volume_df['timestamp'] = pd.to_datetime(total_volume_df['timestamp'], format=\"%m/%d/%Y %I:%M:%S %p\")\n",
"flow_rate_df['timestamp'] = pd.to_datetime(flow_rate_df['timestamp'], format=\"%m/%d/%Y %I:%M:%S %p\")\n",
"\n",
"# Filter out the erroneous data in total volume\n",
"total_volume_df = total_volume_df[total_volume_df['timestamp'] <= '2023-11-17 11:00:00']\n",
"\n",
"# Sort the flow rate data in ascending order by timestamp\n",
"flow_rate_df = flow_rate_df.sort_values(by='timestamp')\n",
"\n",
"# Calculate time differences and total volume for flow rate data\n",
"flow_rate_df['time_diff'] = flow_rate_df['timestamp'].diff().dt.total_seconds().div(60) # in minutes\n",
"flow_rate_df.iloc[0, flow_rate_df.columns.get_loc('time_diff')] = 0\n",
"flow_rate_df['total_volume'] = flow_rate_df['value'] * flow_rate_df['time_diff']\n",
"monthly_flow_rate_total = flow_rate_df.groupby(pd.Grouper(key='timestamp', freq='M'))['total_volume'].sum()\n",
"\n",
"# Adjusting the calculation for the total volume data\n",
"total_volume_df['reset'] = total_volume_df['value'].lt(total_volume_df['value'].shift())\n",
"total_volume_df['group'] = total_volume_df['reset'].cumsum()\n",
"last_values_before_reset = total_volume_df[total_volume_df['reset']].groupby('group').first()['value']\n",
"last_value_final_group = total_volume_df[~total_volume_df['reset']].iloc[-1]['value']\n",
"\"\"\"\n",
"last_values_before_reset = last_values_before_reset.append(pd.Series([last_value_final_group]))\n",
"monthly_total_corrected = last_values_before_reset.groupby(pd.Grouper(freq='M')).sum()\n",
"\n",
"# Compare with the flow rate total\n",
"comparison_df_corrected = pd.DataFrame({'Corrected Total Volume': monthly_total_corrected, 'Flow Rate Total': monthly_flow_rate_total})\n",
"comparison_df_corrected.reset_index() # Resetting index for better visualization\n",
"comparison_df_corrected.head()\n",
"\"\"\"\n",
"with pd.option_context('display.max_rows', None, 'display.max_columns', None): # more options can be specified also\n",
" print(last_values_before_reset)\n",
"\n"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "tbDataCollector",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.10.5"
}
},
"nbformat": 4,
"nbformat_minor": 2
}
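As in the first notebook, the flow-rate total is a rectangle-rule integration of rate against the gap to the previous sample; a minimal sketch with illustrative rows:

# Minimal sketch, not part of the notebook: rate (units/min) x minutes elapsed.
import pandas as pd

df = pd.DataFrame({
    "timestamp": pd.to_datetime(["2023-11-01 00:00", "2023-11-01 00:10", "2023-11-01 00:30"]),
    "value": [100.0, 120.0, 90.0],            # flow rate, units per minute
})
df["time_diff"] = df["timestamp"].diff().dt.total_seconds().div(60).fillna(0)
df["total_volume"] = df["value"] * df["time_diff"]   # [0, 1200, 1800]
print(df["total_volume"].sum())                      # 3000.0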