updated drivers

This commit is contained in:
Nico Melone
2023-01-26 22:56:53 -06:00
parent af3f913fc4
commit 93e27579cc
25 changed files with 3873 additions and 90 deletions

BIN
.DS_Store vendored

Binary file not shown.

BIN
Pub_Sub/.DS_Store vendored

Binary file not shown.

Binary file not shown.

View File

@@ -1,15 +0,0 @@
# Enter your python code.
import json
from common.Logger import logger
from quickfaas.remotebus import publish
import re, uuid
def sendData(message):
    """Publish all measures from one poll as a single flat JSON payload."""
    logger.debug(message)
    payload = {m["name"]: m["value"] for m in message["measures"]}
    publish(__topic__, json.dumps(payload), __qos__)

View File

@@ -19,7 +19,7 @@
"groups": [
{
"name": "default",
"uploadInterval": 3600,
"uploadInterval": 600,
"reference": 10
}
],
@@ -300,7 +300,7 @@
"genericFuncs": [],
"uploadFuncs": [
{
"name": "Mistaway",
"name": "Send Data",
"trigger": "measure_event",
"topic": "v1/devices/me/telemetry",
"qos": 1,

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,218 @@
# Enter your python code.
import json, os
from datetime import datetime as dt
from common.Logger import logger
from quickfaas.remotebus import publish
from quickfaas.global_dict import get as get_params
from quickfaas.global_dict import _set_global_args
def reboot(reason="Rebooting for config file update"):
    """Restart the on-device app by signalling supervisord.

    reason: free-text explanation written to the log before restarting.
    """
    #basic = Basic()
    logger.info("!" * 10 + "REBOOTING DEVICE" + "!"*10)
    logger.info(reason)
    # SIGHUP to supervisord restarts the supervised app; it does not reboot the OS.
    r = os.popen("kill -s SIGHUP `cat /var/run/python/supervisord.pid`").read()
    logger.info(f"REBOOT : {r}")
def checkFileExist(filename):
    """Ensure /var/user/files/<filename> exists, seeding it with an empty JSON object.

    filename: bare file name (no directory component).
    """
    path = "/var/user/files"
    filepath = os.path.join(path, filename)
    if not os.path.exists(path):
        logger.info("no folder making files folder in var/user")
        os.makedirs(path)
    # Fix: the original duplicated the file-seeding logic inside the
    # makedirs branch; one existence check now covers both cases.
    if not os.path.exists(filepath):
        # NOTE(review): message says "creds" but this runs for any filename.
        logger.info("no creds file making creds file")
        with open(filepath, "a") as f:
            json.dump({}, f)
def convertDStoJSON(ds):
    """Flatten a list of {"key": ..., "value": ...} records into a plain dict."""
    return {entry["key"]: entry["value"] for entry in ds}
def convertJSONtoDS(j):
    """Expand a plain dict into a list of {"key": ..., "value": ...} records."""
    return [{"key": k, "value": v} for k, v in j.items()]
def checkCredentialConfig():
    """Reconcile MQTT credentials between the supervisor config and creds.json.

    If the config still holds "unknown"/empty placeholders, restore the
    values saved in /var/user/files/creds.json, rewrite the config, and
    reboot so they take effect.  Otherwise persist the (possibly
    operator-edited) config values back to creds.json so they survive a
    config reset.
    """
    logger.info("CHECKING CONFIG")
    cfgpath = "/var/user/cfg/device_supervisor/device_supervisor.cfg"
    credspath = "/var/user/files/creds.json"
    cfg = dict()
    with open(cfgpath, "r") as f:
        cfg = json.load(f)
    clouds = cfg.get("clouds")
    logger.info(clouds)
    #if not configured then try to configure from stored values
    if clouds[0]["args"]["clientId"] == "unknown" or clouds[0]["args"]["username"] == "unknown" or not clouds[0]["args"]["passwd"] or clouds[0]["args"]["passwd"] == "unknown":
        checkFileExist("creds.json")
        with open(credspath, "r") as c:
            creds = json.load(c)
            if creds:
                logger.info("updating config with stored data")
                clouds[0]["args"]["clientId"] = creds["clientId"]
                clouds[0]["args"]["username"] = creds["userName"]
                clouds[0]["args"]["passwd"] = creds["password"]
                cfg["clouds"] = clouds
                cfg = checkParameterConfig(cfg)
                with open(cfgpath, "w", encoding='utf-8') as n:
                    json.dump(cfg, n, indent=1, ensure_ascii=False)
                # Restart so the supervisor picks up the rewritten config.
                reboot()
    else:
        #assuming clouds is filled out, if data is different then assume someone typed in something new and store it, if creds is empty fill with clouds' data
        checkFileExist("creds.json")
        with open(credspath, "r") as c:
            logger.info("updating stored file with new data")
            cfg = checkParameterConfig(cfg)
            with open(cfgpath, "w", encoding='utf-8') as n:
                json.dump(cfg, n, indent=1, ensure_ascii=False)
            creds = json.load(c)
            if creds:
                if creds["clientId"] != clouds[0]["args"]["clientId"]:
                    creds["clientId"] = clouds[0]["args"]["clientId"]
                if creds["userName"] != clouds[0]["args"]["username"]:
                    creds["userName"] = clouds[0]["args"]["username"]
                if creds["password"] != clouds[0]["args"]["passwd"]:
                    creds["password"] = clouds[0]["args"]["passwd"]
            else:
                # Empty creds file: seed it from the current config values.
                creds["clientId"] = clouds[0]["args"]["clientId"]
                creds["userName"] = clouds[0]["args"]["username"]
                creds["password"] = clouds[0]["args"]["passwd"]
            with open(credspath, "w") as cw:
                json.dump(creds,cw)
def checkParameterConfig(cfg):
    """Merge device parameters ("labels") between cfg and params.json.

    cfg: parsed device_supervisor.cfg dict.  Returns cfg with its "labels"
    list synchronized against /var/user/files/params.json; the merged
    values are also pushed into the runtime global dict.
    """
    logger.info("Checking Parameters!!!!")
    paramspath = "/var/user/files/params.json"
    cfgparams = convertDStoJSON(cfg.get("labels"))
    #check stored values
    checkFileExist("params.json")
    with open(paramspath, "r") as f:
        logger.info("Opened param storage file")
        params = json.load(f)
        if params:
            if cfgparams != params:
                #go through each param
                #if not "unknown" and cfg and params aren't the same take from cfg likely updated manually
                #if key in cfg but not in params copy to params
                logger.info("equalizing params between cfg and stored")
                for key in cfgparams.keys():
                    try:
                        if cfgparams[key] != params[key] and cfgparams[key] != "unknown":
                            params[key] = cfgparams[key]
                    except:
                        # Key missing from the stored file: copy it over.
                        params[key] = cfgparams[key]
                cfg["labels"] = convertJSONtoDS(params)
                _set_global_args(convertJSONtoDS(params))
                with open(paramspath, "w") as p:
                    json.dump(params, p)
        else:
            with open(paramspath, "w") as p:
                logger.info("initializing param file with params in memory")
                json.dump(convertDStoJSON(get_params()), p)
                cfg["labels"] = get_params()
    return cfg
def get_totalizers():
    """Load totalizer state from disk, falling back to zeroed defaults.

    Returns a dict of day/week/month/year/lifetime counters plus the
    *Holding baselines used to compute per-period volume deltas.
    """
    # Fix: the zeroed-state dict was duplicated in two branches and the
    # bare `except` hid unrelated errors; one default, narrowed exceptions.
    defaults = {
        "day": 0,
        "week": 0,
        "month": 0,
        "year": 0,
        "lifetime": 0,
        "dayHolding": 0,
        "weekHolding": 0,
        "monthHolding": 0,
        "yearHolding": 0
    }
    try:
        with open("/var/user/files/totalizers.json", "r") as t:
            totalizers = json.load(t)
        if not totalizers:
            logger.info("-----INITIALIZING TOTALIZERS-----")
            totalizers = defaults
    except (OSError, ValueError):
        # Missing file or corrupt JSON: start from scratch.
        totalizers = defaults
    return totalizers
def saveTotalizers(totalizers):
    """Persist totalizer state to disk; failures are logged, never raised."""
    try:
        with open("/var/user/files/totalizers.json", "w") as t:
            json.dump(totalizers,t)
    except Exception as e:
        logger.error(e)
def totalizeDay(lifetime):
    """Return (volume accumulated today, whether a daily rollover occurred).

    lifetime: lifetime volume reading; today's volume is lifetime minus the
    baseline captured at the last daily rollover.
    """
    totalizers = get_totalizers()
    # Snap "now" to the nearest 10-minute boundary to match the poll cadence.
    now = dt.fromtimestamp(round(dt.timestamp(dt.now())/600)*600)
    reset = False
    value = lifetime - totalizers["dayHolding"]
    # Roll over on the first on-the-hour poll at/after 08:00 of a new day, or
    # whenever the stored day-of-month is more than one day away (stale state).
    # NOTE(review): `now.minute == 0` assumes a poll lands exactly on the hour
    # after the 10-minute rounding — confirm against the upload interval.
    if (not int(now.strftime("%d")) == int(totalizers["day"]) and now.hour >= 8 and now.minute == 0) or (abs(int(now.strftime("%d")) - int(totalizers["day"])) > 1 ):
        totalizers["dayHolding"] = lifetime
        totalizers["day"] = int(now.strftime("%d"))
        saveTotalizers(totalizers)
        reset = True
    return (value,reset)
def totalizeMonth(lifetime):
    """Return (volume since start of month, whether a monthly rollover occurred).

    lifetime: lifetime volume reading; this month's volume is lifetime minus
    the baseline captured at the last monthly rollover.
    """
    totalizers = get_totalizers()
    # Snap "now" to the nearest 10-minute boundary to match the poll cadence.
    now = dt.fromtimestamp(round(dt.timestamp(dt.now())/600)*600)
    reset = False
    value = lifetime - totalizers["monthHolding"]
    if (not int(now.strftime("%m")) == int(totalizers["month"]) and now.hour >= 8 and now.minute == 0) or (abs(int(now.strftime("%m")) - int(totalizers["month"])) > 1 ):
        totalizers["monthHolding"] = lifetime
        # Fix: store the month as an int for consistency with totalizeDay
        # (the comparisons above already int()-cast the stored value).
        totalizers["month"] = int(now.strftime("%m"))
        saveTotalizers(totalizers)
        reset = True
    return (value,reset)
def sendData(message):
    """Upload one poll's measures as a telemetry payload plus rollover resets.

    message: platform dict with a "measures" list of {"name", "value",
    "timestamp"} entries.  Publishes the main telemetry payload, a
    latestReportTime attribute, and — on a day/month rollover — a reset
    payload carrying yesterday/last-month totals.
    """
    logger.debug(message)
    try:
        checkCredentialConfig()
    except Exception as e:
        logger.error(e)
    # Timestamps are snapped to the 10-minute poll boundary, in milliseconds.
    payload = {"ts": (round(dt.timestamp(dt.now())/600)*600)*1000, "values": {}}
    resetPayload = {"ts": "", "values": {}}
    # Fix: these were only assigned inside the accumulated_volume branch, so
    # a poll without that measure raised NameError at the `if dayReset` check.
    dayReset = False
    monthReset = False
    for measure in message["measures"]:
        try:
            # A poll more than 2h out of sync means the clock/poller is wedged.
            if abs(payload["ts"]/1000 - measure["timestamp"]) > 7200:
                reboot(reason="Poll timestamp and actual timestamp out of sync. Actual: {} Poll: {}".format(payload["ts"]/1000,measure["timestamp"]))
            if measure["name"] in ["accumulated_volume"]:
                payload["values"]["today_volume"], dayReset = totalizeDay(measure["value"])
                payload["values"]["month_volume"], monthReset = totalizeMonth(measure["value"])
            # today/yesterday volumes are derived above, never passed through.
            if measure["name"] in ["today_volume", "yesterday_volume"]:
                pass
            else:
                payload["values"][measure["name"]] = measure["value"]
        except Exception as e:
            logger.error(e)
    publish(__topic__, json.dumps(payload), __qos__)
    publish("v1/devices/me/attributes", json.dumps({"latestReportTime": (round(dt.timestamp(dt.now())/600)*600)*1000}), __qos__)
    if dayReset:
        resetPayload["values"]["yesterday_volume"] = payload["values"]["today_volume"]
        resetPayload["values"]["today_volume"] = 0
    if monthReset:
        resetPayload["values"]["last_month_volume"] = payload["values"]["month_volume"]
        resetPayload["values"]["month_volume"] = 0
    if resetPayload["values"]:
        # Offset by 1 ms so the reset sample orders after the main payload.
        resetPayload["ts"] = 1 + (round(dt.timestamp(dt.now())/600)*600)*1000
        publish(__topic__, json.dumps(resetPayload), __qos__)

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,218 @@
# Enter your python code.
import json, os
from datetime import datetime as dt
from common.Logger import logger
from quickfaas.remotebus import publish
from quickfaas.global_dict import get as get_params
from quickfaas.global_dict import _set_global_args
def reboot(reason="Rebooting for config file update"):
#basic = Basic()
logger.info("!" * 10 + "REBOOTING DEVICE" + "!"*10)
logger.info(reason)
r = os.popen("kill -s SIGHUP `cat /var/run/python/supervisord.pid`").read()
logger.info(f"REBOOT : {r}")
def checkFileExist(filename):
    """Ensure /var/user/files/<filename> exists, seeding it with an empty JSON object.

    filename: bare file name (no directory component).
    """
    path = "/var/user/files"
    filepath = os.path.join(path, filename)
    if not os.path.exists(path):
        logger.info("no folder making files folder in var/user")
        os.makedirs(path)
    # Fix: the original duplicated the file-seeding logic inside the
    # makedirs branch; one existence check now covers both cases.
    if not os.path.exists(filepath):
        # NOTE(review): message says "creds" but this runs for any filename.
        logger.info("no creds file making creds file")
        with open(filepath, "a") as f:
            json.dump({}, f)
def convertDStoJSON(ds):
    """Flatten a list of {"key": ..., "value": ...} records into a plain dict."""
    return {entry["key"]: entry["value"] for entry in ds}
def convertJSONtoDS(j):
    """Expand a plain dict into a list of {"key": ..., "value": ...} records."""
    return [{"key": k, "value": v} for k, v in j.items()]
def checkCredentialConfig():
logger.info("CHECKING CONFIG")
cfgpath = "/var/user/cfg/device_supervisor/device_supervisor.cfg"
credspath = "/var/user/files/creds.json"
cfg = dict()
with open(cfgpath, "r") as f:
cfg = json.load(f)
clouds = cfg.get("clouds")
logger.info(clouds)
#if not configured then try to configure from stored values
if clouds[0]["args"]["clientId"] == "unknown" or clouds[0]["args"]["username"] == "unknown" or not clouds[0]["args"]["passwd"] or clouds[0]["args"]["passwd"] == "unknown":
checkFileExist("creds.json")
with open(credspath, "r") as c:
creds = json.load(c)
if creds:
logger.info("updating config with stored data")
clouds[0]["args"]["clientId"] = creds["clientId"]
clouds[0]["args"]["username"] = creds["userName"]
clouds[0]["args"]["passwd"] = creds["password"]
cfg["clouds"] = clouds
cfg = checkParameterConfig(cfg)
with open(cfgpath, "w", encoding='utf-8') as n:
json.dump(cfg, n, indent=1, ensure_ascii=False)
reboot()
else:
#assuming clouds is filled out, if data is different then assume someone typed in something new and store it, if creds is empty fill with clouds' data
checkFileExist("creds.json")
with open(credspath, "r") as c:
logger.info("updating stored file with new data")
cfg = checkParameterConfig(cfg)
with open(cfgpath, "w", encoding='utf-8') as n:
json.dump(cfg, n, indent=1, ensure_ascii=False)
creds = json.load(c)
if creds:
if creds["clientId"] != clouds[0]["args"]["clientId"]:
creds["clientId"] = clouds[0]["args"]["clientId"]
if creds["userName"] != clouds[0]["args"]["username"]:
creds["userName"] = clouds[0]["args"]["username"]
if creds["password"] != clouds[0]["args"]["passwd"]:
creds["password"] = clouds[0]["args"]["passwd"]
else:
creds["clientId"] = clouds[0]["args"]["clientId"]
creds["userName"] = clouds[0]["args"]["username"]
creds["password"] = clouds[0]["args"]["passwd"]
with open(credspath, "w") as cw:
json.dump(creds,cw)
def checkParameterConfig(cfg):
logger.info("Checking Parameters!!!!")
paramspath = "/var/user/files/params.json"
cfgparams = convertDStoJSON(cfg.get("labels"))
#check stored values
checkFileExist("params.json")
with open(paramspath, "r") as f:
logger.info("Opened param storage file")
params = json.load(f)
if params:
if cfgparams != params:
#go through each param
#if not "unknown" and cfg and params aren't the same take from cfg likely updated manually
#if key in cfg but not in params copy to params
logger.info("equalizing params between cfg and stored")
for key in cfgparams.keys():
try:
if cfgparams[key] != params[key] and cfgparams[key] != "unknown":
params[key] = cfgparams[key]
except:
params[key] = cfgparams[key]
cfg["labels"] = convertJSONtoDS(params)
_set_global_args(convertJSONtoDS(params))
with open(paramspath, "w") as p:
json.dump(params, p)
else:
with open(paramspath, "w") as p:
logger.info("initializing param file with params in memory")
json.dump(convertDStoJSON(get_params()), p)
cfg["labels"] = get_params()
return cfg
def get_totalizers():
    """Load totalizer state from disk, falling back to zeroed defaults.

    Returns a dict of day/week/month/year/lifetime counters plus the
    *Holding baselines used to compute per-period volume deltas.
    """
    # Fix: the zeroed-state dict was duplicated in two branches and the
    # bare `except` hid unrelated errors; one default, narrowed exceptions.
    defaults = {
        "day": 0,
        "week": 0,
        "month": 0,
        "year": 0,
        "lifetime": 0,
        "dayHolding": 0,
        "weekHolding": 0,
        "monthHolding": 0,
        "yearHolding": 0
    }
    try:
        with open("/var/user/files/totalizers.json", "r") as t:
            totalizers = json.load(t)
        if not totalizers:
            logger.info("-----INITIALIZING TOTALIZERS-----")
            totalizers = defaults
    except (OSError, ValueError):
        # Missing file or corrupt JSON: start from scratch.
        totalizers = defaults
    return totalizers
def saveTotalizers(totalizers):
try:
with open("/var/user/files/totalizers.json", "w") as t:
json.dump(totalizers,t)
except Exception as e:
logger.error(e)
def totalizeDay(lifetime):
totalizers = get_totalizers()
now = dt.fromtimestamp(round(dt.timestamp(dt.now())/600)*600)
reset = False
value = lifetime - totalizers["dayHolding"]
if (not int(now.strftime("%d")) == int(totalizers["day"]) and now.hour >= 8 and now.minute == 0) or (abs(int(now.strftime("%d")) - int(totalizers["day"])) > 1 ):
totalizers["dayHolding"] = lifetime
totalizers["day"] = int(now.strftime("%d"))
saveTotalizers(totalizers)
reset = True
return (value,reset)
def totalizeMonth(lifetime):
    """Return (volume since start of month, whether a monthly rollover occurred).

    lifetime: lifetime volume reading; this month's volume is lifetime minus
    the baseline captured at the last monthly rollover.
    """
    totalizers = get_totalizers()
    # Snap "now" to the nearest 10-minute boundary to match the poll cadence.
    now = dt.fromtimestamp(round(dt.timestamp(dt.now())/600)*600)
    reset = False
    value = lifetime - totalizers["monthHolding"]
    if (not int(now.strftime("%m")) == int(totalizers["month"]) and now.hour >= 8 and now.minute == 0) or (abs(int(now.strftime("%m")) - int(totalizers["month"])) > 1 ):
        totalizers["monthHolding"] = lifetime
        # Fix: store the month as an int for consistency with totalizeDay
        # (the comparisons above already int()-cast the stored value).
        totalizers["month"] = int(now.strftime("%m"))
        saveTotalizers(totalizers)
        reset = True
    return (value,reset)
def sendData(message):
    """Upload one poll's measures as a telemetry payload plus rollover resets.

    message: platform dict with a "measures" list of {"name", "value",
    "timestamp"} entries.  Publishes the main telemetry payload, a
    latestReportTime attribute, and — on a day/month rollover — a reset
    payload carrying yesterday/last-month totals.
    """
    logger.debug(message)
    try:
        checkCredentialConfig()
    except Exception as e:
        logger.error(e)
    # Timestamps are snapped to the 10-minute poll boundary, in milliseconds.
    payload = {"ts": (round(dt.timestamp(dt.now())/600)*600)*1000, "values": {}}
    resetPayload = {"ts": "", "values": {}}
    # Fix: these were only assigned inside the accumulated_volume branch, so
    # a poll without that measure raised NameError at the `if dayReset` check.
    dayReset = False
    monthReset = False
    for measure in message["measures"]:
        try:
            # A poll more than 2h out of sync means the clock/poller is wedged.
            if abs(payload["ts"]/1000 - measure["timestamp"]) > 7200:
                reboot(reason="Poll timestamp and actual timestamp out of sync. Actual: {} Poll: {}".format(payload["ts"]/1000,measure["timestamp"]))
            if measure["name"] in ["accumulated_volume"]:
                payload["values"]["today_volume"], dayReset = totalizeDay(measure["value"])
                payload["values"]["month_volume"], monthReset = totalizeMonth(measure["value"])
            # today/yesterday volumes are derived above, never passed through.
            if measure["name"] in ["today_volume", "yesterday_volume"]:
                pass
            else:
                payload["values"][measure["name"]] = measure["value"]
        except Exception as e:
            logger.error(e)
    publish(__topic__, json.dumps(payload), __qos__)
    publish("v1/devices/me/attributes", json.dumps({"latestReportTime": (round(dt.timestamp(dt.now())/600)*600)*1000}), __qos__)
    if dayReset:
        resetPayload["values"]["yesterday_volume"] = payload["values"]["today_volume"]
        resetPayload["values"]["today_volume"] = 0
    if monthReset:
        resetPayload["values"]["last_month_volume"] = payload["values"]["month_volume"]
        resetPayload["values"]["month_volume"] = 0
    if resetPayload["values"]:
        # Offset by 1 ms so the reset sample orders after the main payload.
        resetPayload["ts"] = 1 + (round(dt.timestamp(dt.now())/600)*600)*1000
        publish(__topic__, json.dumps(resetPayload), __qos__)

View File

@@ -0,0 +1,60 @@
# Enter your python code.
import json
from quickfaas.measure import recall
from common.Logger import logger
def sync(wizard_api):
    """Publish the latest recalled measure values to the telemetry topic.

    wizard_api: platform API object providing mqtt_publish.
    """
    #get new values and send
    payload = {}
    try:
        data = recall()#json.loads(recall().decode("utf-8"))
    except Exception as e:
        # Fix: the original fell through with `data` unbound and raised
        # NameError on the next line; bail out instead.
        logger.error(e)
        return
    logger.info(data)
    for controller in data:
        for measure in controller["measures"]:
            #publish measure
            topic = "v1/devices/me/telemetry"
            payload[measure["name"]] = measure["value"]
            logger.debug("Sending on topic: {}".format(topic))
            logger.debug("Sending value: {}".format(payload))
            wizard_api.mqtt_publish(topic, json.dumps(payload))
def writeplctag(value, wizard_api):
    """Write a single PLC tag via the wizard API.

    value: dict with "measurement" (tag name) and "value" keys.
    Returns True on success, False if the write raised.
    """
    try:
        logger.debug(value)
        request = {"advvfdipp": {value["measurement"]: value["value"]}}
        wizard_api.write_plc_values(request)
        return True
    except Exception as err:
        logger.debug(err)
        return False
def receiveCommand(topic, payload, wizard_api):
    """RPC entry point: handle an inbound command and acknowledge it.

    topic: request topic; the trailing path segment is the request id.
    payload: JSON body with "method" and "params" keys.
    """
    try:
        logger.debug(topic)
        logger.debug(json.loads(payload))
        p = json.loads(payload)
        command = p["method"]
        # NOTE(review): this dispatch table is only partially wired up —
        # "sync" is never dispatched and `command` is otherwise unused;
        # only the literal method "setPLCTag" is handled below.  Confirm
        # the intended command set.
        commands = {
            "sync": sync,
            "writeplctag": writeplctag,
        }
        if command == "setPLCTag":
            result = commands["writeplctag"](p["params"],wizard_api)
            if result:
                # Push fresh values back so the platform reflects the write.
                sync(wizard_api)
        #commands[command](p["mac"].lower(),p["payload"]["value"], wizard_api)
        #logger.debug(command)
        # Acknowledge using the request id taken from the topic suffix.
        ack(topic.split("/")[-1], wizard_api)
    except Exception as e:
        logger.debug(e)
def ack(msgid, wizard_api):
    """Acknowledge an RPC request by publishing a timestamped response.

    msgid: RPC request id taken from the inbound topic suffix.
    wizard_api: platform API object providing mqtt_publish.
    """
    # Fix: this module only imports json at the top, so `time.time()`
    # raised NameError; import locally to keep the fix self-contained.
    import time
    wizard_api.mqtt_publish("v1/devices/me/rpc/response/" + str(msgid), json.dumps({"msg": {"time": time.time()}, "metadata": "", "msgType": ""}))

Binary file not shown.

Binary file not shown.

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,518 @@
# Enter your python code.
import json, math, time, os
from datetime import datetime as dt
from common.Logger import logger
from quickfaas.remotebus import publish
from quickfaas.global_dict import get as get_params
from quickfaas.global_dict import _set_global_args
import re, uuid
from paho.mqtt import client
class RuntimeStats:
    """Tracks pump run sessions per calendar day for rolling 30-day stats.

    State layout: runs[YYYY-MM-DD]["run_<n>"] = {"start", "end",
    "frequencies"}, where start/end are epoch seconds (0 = not set) and
    frequencies is the list of VFD Hz samples observed during that run.
    State is persisted to /var/user/files/runtimestats.json.
    NOTE(review): most methods assume loadDataFromFile()/addDay() ran
    first; on a fresh instance, todayString is "" and manageTime() would
    raise KeyError — the module-level bootstrap below handles this.
    """
    def __init__(self):
        # Per-day dict of run records (see class docstring).
        self.runs = {}
        # Index of the open/most recent run for today.
        self.currentRun = 0
        # Cached datetime and its "YYYY-MM-DD" key for today.
        self.today = ""
        self.todayString = ""
    def manageTime(self):
        """Roll state over when the date changes and prune to a ~30-day window."""
        if self.todayString != dt.strftime(dt.today(), "%Y-%m-%d"):
            # Close a run left open at midnight at 23:59:59 of the old day.
            if self.runs[self.todayString]["run_" + str(self.currentRun)]["start"] and not self.runs[self.todayString]["run_" + str(self.currentRun)]["end"]:
                self.runs[self.todayString]["run_" + str(self.currentRun)]["end"] = time.mktime(dt.strptime(self.todayString + " 23:59:59", "%Y-%m-%d %H:%M:%S").timetuple())
            self.addDay()
        self.today = dt.today()
        self.todayString = dt.strftime(self.today, "%Y-%m-%d")
        # Drop the oldest days until the tracked span is at most 30 days wide.
        days = list(self.runs.keys())
        days.sort()
        while (dt.strptime(days[-1],"%Y-%m-%d") - dt.strptime(days[0], "%Y-%m-%d")).days > 30:
            self.removeDay(day=days[0])
            days = list(self.runs.keys())
            days.sort()
    def addHertzDataPoint(self, frequency):
        """Record one VFD frequency sample (Hz) against the current run; 0 is ignored."""
        if frequency > 0:
            self.manageTime()
            try:
                self.runs[self.todayString]["run_" + str(self.currentRun)]["frequencies"].append(frequency)
            except:
                # Older persisted runs may predate the "frequencies" key.
                self.runs[self.todayString]["run_" + str(self.currentRun)]["frequencies"] = [frequency]
    def startRun(self):
        """Mark the current run as started now, closing any run still open."""
        if self.checkRunning():
            self.endRun()
        self.runs[self.todayString]["run_" + str(self.currentRun)]["start"] = time.time()
    def endRun(self):
        """Close the current run now and open a fresh empty run slot."""
        self.runs[self.todayString]["run_" + str(self.currentRun)]["end"] = time.time()
        self.currentRun += 1
        self.runs[self.todayString]["run_" + str(self.currentRun)] = {"start":0, "end": 0, "frequencies":[]}
    def checkRunning(self):
        """Return True if the current run has started but not yet ended."""
        if self.runs[self.todayString]["run_" + str(self.currentRun)]["start"] and not self.runs[self.todayString]["run_" + str(self.currentRun)]["end"]:
            return True
        return False
    def addDay(self):
        """Start a new day bucket keyed by today's date with one empty run."""
        self.today = dt.today()
        self.todayString = dt.strftime(self.today, "%Y-%m-%d")
        self.currentRun = 1
        self.runs[self.todayString] = {}
        self.runs[self.todayString]["run_" + str(self.currentRun)] = {"start":0, "end": 0, "frequencies":[]}
    def countRunsDay(self, day=None):
        """Return the number of run slots recorded for a day (default: today)."""
        if not day:
            day = self.todayString
        return len(self.runs[day].keys())
    def countRunsMultiDay(self, numDays=30):
        """Return the total run count across tracked days.

        NOTE(review): numDays is accepted but not used — this counts every
        tracked day (the prune in manageTime caps the span at ~30 days).
        """
        total_runs = 0
        for day in list(self.runs.keys()):
            total_runs += self.countRunsDay(day=day)
        return total_runs
    def calculateAverageHertzDay(self, day=None, returnArray=False):
        """Return the day's mean frequency (2 dp), or the raw sample list.

        NOTE(review): raises ZeroDivisionError if the day has no samples
        and returnArray is False — callers currently guard with try/except.
        """
        dayFrequencies = []
        if not day:
            day = self.todayString
        for run in list(self.runs[day].keys()):
            try:
                dayFrequencies += self.runs[day][run]["frequencies"]
            except Exception as e:
                print("{} missing frequency data for {}".format(day,run))
        if returnArray:
            return dayFrequencies
        return round(math.fsum(dayFrequencies)/len(dayFrequencies),2)
    def calculateAverageHertzMultiDay(self, numDays=30):
        """Return the mean frequency over the past numDays full days (today excluded)."""
        self.manageTime()
        frequencies = []
        for day in list(self.runs.keys()):
            if not day == self.todayString and (dt.strptime(self.todayString, "%Y-%m-%d") - dt.strptime(day, "%Y-%m-%d")).days <= numDays:
                try:
                    frequencies += self.calculateAverageHertzDay(day=day, returnArray=True)
                except Exception as e:
                    print("{} missing frequency data".format(day))
        if len(frequencies):
            return round(math.fsum(frequencies)/len(frequencies), 2)
        return 0
    def calculateRunTimeDay(self, day=None, convertToHours=True):
        """Return total run duration for a day, in hours (default) or seconds."""
        total_time = 0
        if not day:
            day = self.todayString
        for run in list(self.runs[day].keys()):
            total_time = self.runs[day][run]["end"] - self.runs[day][run]["start"] + total_time
        if convertToHours:
            return self.convertSecondstoHours(total_time)
        return total_time
    def calculateRunTimeMultiDay(self, numDays=30, convertToHours=True):
        """Return total run duration over the past numDays full days (today excluded)."""
        total_time = 0
        for day in list(self.runs.keys()):
            if not day == self.todayString and (dt.strptime(self.todayString, "%Y-%m-%d") - dt.strptime(day, "%Y-%m-%d")).days <= numDays:
                total_time += self.calculateRunTimeDay(day=day, convertToHours=False)
        if convertToHours:
            return self.convertSecondstoHours(total_time)
        return total_time
    def calculateRunPercentDay(self, day=None, precise=False):
        """Return run time for a day as a percentage of 24 hours."""
        if not day:
            day = self.todayString
        if precise:
            return (self.calculateRunTimeDay(day=day)/24) * 100
        return round((self.calculateRunTimeDay(day=day)/24) * 100, 2)
    def calculateRunPercentMultiDay(self, numDays=30, precise=False):
        """Return run time over numDays as a percentage of (24 * numDays) hours."""
        self.manageTime()
        if precise:
            return (self.calculateRunTimeMultiDay()/(24*numDays)) * 100
        return round((self.calculateRunTimeMultiDay()/(24*numDays)) * 100,2)
    def removeDay(self, day=None):
        """Delete one day's bucket; day is required."""
        if not day:
            raise Exception("Day can not be None")
        print("removing day {}".format(day))
        del self.runs[day]
    def convertSecondstoHours(self, seconds):
        """Convert seconds to hours rounded to 2 decimal places."""
        return round(seconds / (60*60),2)
    def loadDataFromFile(self, filePath="/var/user/files/runtimestats.json"):
        """Restore persisted state; on any failure, bootstrap a fresh file.

        filePath: JSON file with "current_run", "current_day", and "data".
        """
        try:
            with open(filePath, "r") as f:
                temp = json.load(f)
                self.runs = temp["data"]
                self.currentRun = temp["current_run"]
                self.today = dt.strptime(temp["current_day"], "%Y-%m-%d")
                self.todayString = temp["current_day"]
            self.manageTime()
        except:
            print("Could not find file at {}".format(filePath))
            print("creating file")
            self.addDay()
            try:
                with open(filePath, "w") as f:
                    d = {
                        "current_run": self.currentRun,
                        "current_day": self.todayString,
                        "data": self.runs
                    }
                    json.dump(d, f, indent=4)
            except Exception as e:
                print(e)
    def saveDataToFile(self, filePath="/var/user/files/runtimestats.json"):
        """Persist current state to disk; failures are printed, never raised."""
        try:
            print("Saving Runs")
            with open(filePath, "w") as f:
                d = {
                    "current_run": self.currentRun,
                    "current_day": self.todayString,
                    "data": self.runs
                }
                json.dump(d, f, indent=4)
        except Exception as e:
            print(e)
# Module-level singleton: run history is loaded from (and immediately
# persisted back to) disk at import time so it survives restarts.
rts = RuntimeStats()
rts.loadDataFromFile()
rts.saveDataToFile()
def reboot(reason="Rebooting for config file update"):
#basic = Basic()
logger.info("!" * 10 + "REBOOTING DEVICE" + "!"*10)
logger.info(reason)
r = os.popen("kill -s SIGHUP `cat /var/run/python/supervisord.pid`").read()
logger.info(f"REBOOT : {r}")
def checkFileExist(filename):
    """Ensure /var/user/files/<filename> exists, seeding it with an empty JSON object.

    filename: bare file name (no directory component).
    """
    path = "/var/user/files"
    filepath = os.path.join(path, filename)
    if not os.path.exists(path):
        logger.info("no folder making files folder in var/user")
        os.makedirs(path)
    # Fix: the original duplicated the file-seeding logic inside the
    # makedirs branch; one existence check now covers both cases.
    if not os.path.exists(filepath):
        # NOTE(review): message says "creds" but this runs for any filename.
        logger.info("no creds file making creds file")
        with open(filepath, "a") as f:
            json.dump({}, f)
def convertDStoJSON(ds):
    """Flatten a list of {"key": ..., "value": ...} records into a plain dict."""
    return {entry["key"]: entry["value"] for entry in ds}
def convertJSONtoDS(j):
    """Expand a plain dict into a list of {"key": ..., "value": ...} records."""
    return [{"key": k, "value": v} for k, v in j.items()]
def checkCredentialConfig():
logger.info("CHECKING CONFIG")
cfgpath = "/var/user/cfg/device_supervisor/device_supervisor.cfg"
credspath = "/var/user/files/creds.json"
cfg = dict()
with open(cfgpath, "r") as f:
cfg = json.load(f)
clouds = cfg.get("clouds")
logger.info(clouds)
#if not configured then try to configure from stored values
if clouds[0]["args"]["clientId"] == "unknown" or clouds[0]["args"]["username"] == "unknown" or not clouds[0]["args"]["passwd"] or clouds[0]["args"]["passwd"] == "unknown":
checkFileExist("creds.json")
with open(credspath, "r") as c:
creds = json.load(c)
if creds:
logger.info("updating config with stored data")
clouds[0]["args"]["clientId"] = creds["clientId"]
clouds[0]["args"]["username"] = creds["userName"]
clouds[0]["args"]["passwd"] = creds["password"]
cfg["clouds"] = clouds
cfg = checkParameterConfig(cfg)
with open(cfgpath, "w", encoding='utf-8') as n:
json.dump(cfg, n, indent=1, ensure_ascii=False)
reboot()
else:
#assuming clouds is filled out, if data is different then assume someone typed in something new and store it, if creds is empty fill with clouds' data
checkFileExist("creds.json")
with open(credspath, "r") as c:
logger.info("updating stored file with new data")
cfg = checkParameterConfig(cfg)
with open(cfgpath, "w", encoding='utf-8') as n:
json.dump(cfg, n, indent=1, ensure_ascii=False)
creds = json.load(c)
if creds:
if creds["clientId"] != clouds[0]["args"]["clientId"]:
creds["clientId"] = clouds[0]["args"]["clientId"]
if creds["userName"] != clouds[0]["args"]["username"]:
creds["userName"] = clouds[0]["args"]["username"]
if creds["password"] != clouds[0]["args"]["passwd"]:
creds["password"] = clouds[0]["args"]["passwd"]
else:
creds["clientId"] = clouds[0]["args"]["clientId"]
creds["userName"] = clouds[0]["args"]["username"]
creds["password"] = clouds[0]["args"]["passwd"]
with open(credspath, "w") as cw:
json.dump(creds,cw)
def checkParameterConfig(cfg):
logger.info("Checking Parameters!!!!")
paramspath = "/var/user/files/params.json"
cfgparams = convertDStoJSON(cfg.get("labels"))
#check stored values
checkFileExist("params.json")
with open(paramspath, "r") as f:
logger.info("Opened param storage file")
params = json.load(f)
if params:
if cfgparams != params:
#go through each param
#if not "unknown" and cfg and params aren't the same take from cfg likely updated manually
#if key in cfg but not in params copy to params
logger.info("equalizing params between cfg and stored")
for key in cfgparams.keys():
try:
if cfgparams[key] != params[key] and cfgparams[key] != "unknown":
params[key] = cfgparams[key]
except:
params[key] = cfgparams[key]
cfg["labels"] = convertJSONtoDS(params)
_set_global_args(convertJSONtoDS(params))
with open(paramspath, "w") as p:
json.dump(params, p)
else:
with open(paramspath, "w") as p:
logger.info("initializing param file with params in memory")
json.dump(convertDStoJSON(get_params()), p)
cfg["labels"] = get_params()
return cfg
# Lazily-initialized side-channel MQTT client used solely to register a
# Last Will and Testament (LWT) so the broker can flag this device offline.
# "init" tracks whether username/will/connect have been configured yet.
lwtData = {
    "init":False,
    "client": client.Client(client_id=str(uuid.uuid4()), clean_session=True, userdata=None, protocol=client.MQTTv311, transport="tcp")
}
def lwt(mac):
    """Ensure the LWT client is configured/connected and mark the device online.

    mac: device MAC (colon-separated) used to build the connected-flag topic.
    """
    try:
        #if not lwtData["connected"]:
        if not lwtData["init"]:
            logger.info("INITIALIZING LWT CLIENT")
            # SECURITY(review): hardcoded broker credentials — move to config.
            lwtData["client"].username_pw_set(username="admin", password="columbus")
            # The broker publishes connected=False on our behalf if we drop off.
            lwtData["client"].will_set("meshify/db/194/_/mainHP/" + mac + ":00:00/connected",json.dumps({"value":False}))
            lwtData["init"] = True
            logger.info("Connecting to MQTT Broker for LWT purposes!!!!!!!")
            lwtData["client"].connect("mq194.imistaway.net",1883, 600)
        lwtData["client"].publish("meshify/db/194/_/mainHP/" + mac + ":00:00/connected", json.dumps({"value":True}))
    except Exception as e:
        logger.error("LWT DID NOT DO THE THING")
        logger.error(e)
def sendData(message):
    """Publish each measure to its Meshify per-tag topic and track run stats.

    message: platform dict with a "measures" list of {"name", "value",
    "timestamp"} entries.  Enum-like tags are translated to display strings
    via convert_int; wellstatus/vfdfrequency also feed the RuntimeStats
    rolling 30-day aggregates.
    """
    #logger.debug(message)
    mac = __topic__.split("/")[-1] #':'.join(re.findall('..', '%012x' % uuid.getnode()))
    lwt(mac)
    try:
        # Consistent with the other drivers: a config problem must not
        # abort telemetry for this poll.
        checkCredentialConfig()
    except Exception as e:
        logger.error(e)
    for measure in message["measures"]:
        try:
            logger.debug(measure)
            # Snap "now" to the 10-minute poll boundary (epoch seconds).
            now = (round(dt.timestamp(dt.now())/600)*600)
            if abs(now - measure["timestamp"]) > 7200:
                reboot(reason="Poll timestamp and actual timestamp out of sync. Actual: {} Poll: {}".format(now,measure["timestamp"]))
            if measure["name"] in ["wellstatus","pidcontrolmode","downholesensorstatus","alarmflowrate","alarmintakepressure","alarmintaketemperature","alarmtubingpressure","alarmvfd","alarmlockout","alarmfluidlevel","runpermissive","startpermissive","last_vfd_fault_code","vfd_fault"]:
                logger.debug("Converting DINT/BOOL to STRING")
                value = convert_int(measure["name"], measure["value"])
                logger.debug("Converted {} to {}".format(measure["value"], value))
                publish(__topic__ + ":01:99/" + measure["name"], json.dumps({"value": value}), __qos__)
            else:
                publish(__topic__ + ":01:99/" + measure["name"], json.dumps({"value": measure["value"]}), __qos__)
            if measure["name"] == "wellstatus":
                # wellstatus 0 == Running: open/close run sessions accordingly.
                if measure["value"] == 0 and not rts.runs[rts.todayString]["run_" + str(rts.currentRun)]["start"]:
                    rts.startRun()
                    rts.saveDataToFile()
                elif measure["value"] > 0 and rts.runs[rts.todayString]["run_" + str(rts.currentRun)]["start"] and not rts.runs[rts.todayString]["run_" + str(rts.currentRun)]["end"]:
                    rts.endRun()
                    rts.saveDataToFile()
                publish(__topic__ + ":01:99/" + "percentRunTime30Days", json.dumps({"value": rts.calculateRunPercentMultiDay()}), __qos__)
            if measure["name"] == "vfdfrequency":
                if measure["value"] > 0:
                    # Fix: `val` was an undefined name here (NameError on
                    # every frequency sample); use the measure's value.
                    rts.addHertzDataPoint(measure["value"])
                    rts.saveDataToFile()
                publish(__topic__ + ":01:99/" + "avgFrequency30Days", json.dumps({"value": rts.calculateAverageHertzMultiDay()}), __qos__)
        except Exception as e:
            logger.error(e)
    #publish(__topic__, json.dumps({measure["name"]: measure["value"]}), __qos__)
def convert_int(plc_tag, value):
    """Translate a raw DINT/BOOL PLC reading into its display string.

    plc_tag: name of the PLC tag the value came from.
    value: raw integer code read from the PLC.
    Returns the human-readable label, "Invalid Code" for an unknown value,
    or "Invalid Tag" for an unknown tag.
    """
    well_status_codes = {
        0: "Running",
        1: "Pumped Off",
        2: "Alarmed",
        3: "Locked Out",
        4: "Stopped"
    }
    pid_control_codes = {
        0: "Flow",
        1: "Fluid Level",
        2: "Tubing Pressure",
        3: "Manual"
    }
    downhole_codes = {
        0: "OK",
        1: "Connecting",
        2: "Open Circuit",
        3: "Shorted",
        4: "Cannot Decode"
    }
    permissive_codes = {
        0: "OK",
        1: "Flow",
        2: "Intake Pressure",
        3: "Intake Temperature",
        4: "Tubing Pressure",
        5: "VFD",
        6: "Fluid Level",
        7: "Min. Downtime"
    }
    alarm_codes = {
        0: "OK",
        1: "Alarm"
    }
    alarm_vfd_codes = {
        0: "OK",
        1: "Locked Out"
    }
    # Allen-Bradley PowerFlex-style drive fault codes.
    # NOTE(review): 28 "TP Encls Confict" [sic] kept as-is — downstream
    # systems may match on the exact label.
    vfd_fault_codes = {
        0: "No Fault",
        2: "Auxiliary Input",
        3: "Power Loss",
        4: "UnderVoltage",
        5: "OverVoltage",
        7: "Motor Overload",
        8: "Heatsink OverTemp",
        9: "Thermister OverTemp",
        10: "Dynamic Brake OverTemp",
        12: "Hardware OverCurrent",
        13: "Ground Fault",
        14: "Ground Warning",
        15: "Load Loss",
        17: "Input Phase Loss",
        18: "Motor PTC Trip",
        19: "Task Overrun",
        20: "Torque Prove Speed Band",
        21: "Output Phase Loss",
        24: "Decel Inhibit",
        25: "OverSpeed Limit",
        26: "Brake Slipped",
        27: "Torque Prove Conflict",
        28: "TP Encls Confict",
        29: "Analog In Loss",
        33: "Auto Restarts Exhausted",
        35: "IPM OverCurrent",
        36: "SW OverCurrent",
        38: "Phase U to Ground",
        39: "Phase V to Ground",
        40: "Phase W to Ground",
        41: "Phase UV Short",
        42: "Phase VW Short",
        43: "Phase WU Short",
        44: "Phase UNeg to Ground",
        45: "Phase VNeg to Ground",
        46: "Phase WNeg to Ground",
        48: "System Defaulted",
        49: "Drive Powerup",
        51: "Clear Fault Queue",
        55: "Control Board Overtemp",
        59: "Invalid Code",
        61: "Shear Pin 1",
        62: "Shear Pin 2",
        64: "Drive Overload",
        66: "OW Torque Level",
        67: "Pump Off",
        71: "Port 1 Adapter",
        72: "Port 2 Adapter",
        73: "Port 3 Adapter",
        74: "Port 4 Adapter",
        75: "Port 5 Adapter",
        76: "Port 6 Adapter",
        77: "IR Volts Range",
        78: "FluxAmps Ref Range",
        79: "Excessive Load",
        80: "AutoTune Aborted",
        81: "Port 1 DPI Loss",
        82: "Port 2 DPI Loss",
        83: "Port 3 DPI Loss",
        84: "Port 4 DPI Loss",
        85: "Port 5 DPI Loss",
        86: "Port 6 DPI Loss",
        87: "IXo Voltage Range",
        91: "Primary Velocity Feedback Loss",
        93: "Hardware Enable Check",
        94: "Alternate Velocity Feedback Loss",
        95: "Auxiliary Velocity Feedback Loss",
        96: "Position Feedback Loss",
        97: "Auto Tach Switch",
        100: "Parameter Checksum",
        101: "Power Down NVS Blank",
        102: "NVS Not Blank",
        103: "Power Down NVS Incompatible",
        104: "Power Board Checksum",
        106: "Incompat MCB-PB",
        107: "Replaced MCB-PB",
        108: "Analog Calibration Checksum",
        110: "Invalid Power Board Data",
        111: "Power Board Invalid ID",
        112: "Power Board App Min Version",
        113: "Tracking DataError",
        115: "Power Down Table Full",
        116: "Power Down Entry Too Large",
        117: "Power Down Data Checksum",
        118: "Power Board Power Down Checksum",
        124: "App ID Changed",
        125: "Using Backup App",
        134: "Start on Power Up",
        137: "External Precharge Error",
        138: "Precharge Open",
        141: "Autotune Enc Angle",
        142: "Autotune Speed Restricted",
        143: "Autotune Current Regulator",
        144: "Autotune Inertia",
        145: "Autotune Travel",
        13035: "Net IO Timeout",
        13037: "Net IO Timeout"
    }
    # Fix: the original built a dict of 14 pre-computed .get() results on
    # every call; map the tag to its code table and do a single lookup.
    tag_to_codes = {
        "wellstatus": well_status_codes,
        "pidcontrolmode": pid_control_codes,
        "downholesensorstatus": downhole_codes,
        "alarmflowrate": alarm_codes,
        "alarmintakepressure": alarm_codes,
        "alarmintaketemperature": alarm_codes,
        "alarmtubingpressure": alarm_codes,
        "alarmvfd": alarm_codes,
        "alarmlockout": alarm_vfd_codes,
        "alarmfluidlevel": alarm_codes,
        "runpermissive": permissive_codes,
        "startpermissive": permissive_codes,
        "last_vfd_fault_code": vfd_fault_codes,
        "vfd_fault": vfd_fault_codes
    }
    codes = tag_to_codes.get(plc_tag)
    if codes is None:
        return "Invalid Tag"
    return codes.get(value, "Invalid Code")

View File

@@ -0,0 +1,221 @@
# Enter your python code.
import json
from quickfaas.measure import recall
from common.Logger import logger
def sync(mac, value, wizard_api):
    """Re-publish every current measure value for this device over MQTT.

    Triggered by the remote "sync" command: recalls the latest measure
    snapshot from the device supervisor and publishes each measure on its
    meshify topic. Enumerated PLC tags are first translated to their
    human-readable string via convert_int().

    mac        -- device MAC (lowercased by the caller), used in the topic path
    value      -- command payload (unused; kept for the shared handler signature)
    wizard_api -- supervisor API object providing mqtt_publish()
    """
    # Tags whose raw integer value must be mapped to a status string.
    enum_tags = {
        "wellstatus", "pidcontrolmode", "downholesensorstatus",
        "alarmflowrate", "alarmintakepressure", "alarmintaketemperature",
        "alarmtubingpressure", "alarmvfd", "alarmlockout", "alarmfluidlevel",
        "runpermissive", "startpermissive", "last_vfd_fault_code", "vfd_fault",
    }
    try:
        data = recall()
    except Exception as e:
        # recall() failed -- without a snapshot there is nothing to publish.
        # (Previously execution fell through and raised NameError on `data`.)
        logger.error(e)
        return
    logger.info(data)
    for controller in data:
        for measure in controller["measures"]:
            # One topic per measure: meshify/db/194/_/advvfdipp/<mac>/<name>
            topic = "meshify/db/194/_/advvfdipp/" + mac + "/" + measure["name"]
            if measure["name"] in enum_tags:
                payload = [{"value": convert_int(measure["name"], measure["value"])}]
            else:
                payload = [{"value": measure["value"]}]
            logger.debug("Sending on topic: {}".format(topic))
            logger.debug("Sending value: {}".format(payload))
            wizard_api.mqtt_publish(topic, json.dumps(payload))
def writeplctag(mac, value, wizard_api):
    """Write a single PLC tag through the supervisor API.

    `value` arrives as a JSON string (possibly single-quoted) of the form
    {"tag": <tag name>, "val": <new value>}. Any failure is logged at debug
    level and swallowed so a malformed command cannot crash the listener.
    """
    try:
        # Commands sometimes arrive single-quoted; normalize before parsing.
        request = json.loads(value.replace("'", '"'))
        logger.debug(request)
        wizard_api.write_plc_values({"advvfdipp": {request["tag"]: request["val"]}})
    except Exception as err:
        logger.debug(err)
def receiveCommand(topic, payload, wizard_api):
    """MQTT callback: dispatch an incoming meshify command to its handler.

    The payload is a JSON list whose first element carries the command
    descriptor; the command name is the segment after the first '.' in
    payload.name. After the handler runs, an acknowledgement is published
    via ack().
    """
    logger.debug(topic)
    logger.debug(json.loads(payload))
    request = json.loads(payload)[0]
    command = request["payload"]["name"].split(".")[1]
    handlers = {
        "sync": sync,
        "writeplctag": writeplctag,
    }
    handlers[command](request["mac"].lower(), request["payload"]["value"], wizard_api)
    ack(request["msgId"], request["mac"], command, command,
        request["payload"]["value"], wizard_api)
def ack(msgid, mac, name, command, value, wizard_api):
    """Publish the two acknowledgement messages for a completed command.

    A human-readable response goes to meshify/responses/<msgid>; a
    structured status record goes to the device's commands channel, whose
    topic embeds the first four MAC octets lowercased and padded with
    ':00:00'.
    """
    octets = mac.split(":")[:-2]              # drop the last two MAC octets
    squished = "".join(octets)                # e.g. "AABBCCDD"
    channel_mac = ":".join(octets).lower()    # e.g. "aa:bb:cc:dd"
    response = [{
        "value": "{} Success Setting: {} To: {}".format(squished, name, value),
        "msgid": str(msgid),
    }]
    wizard_api.mqtt_publish("meshify/responses/" + str(msgid), json.dumps(response))
    status = [{
        "value": {"status": "success", "value": str(value), "channel": command},
        "msgid": str(msgid),
    }]
    wizard_api.mqtt_publish(
        "meshify/db/194/_/mainMeshify/" + channel_mac + ":00:00/commands",
        json.dumps(status),
    )
def convert_int(plc_tag, value):
    """Translate a raw integer PLC value into its human-readable string.

    plc_tag -- lowercase tag name selecting which code table applies
    value   -- raw integer read from the PLC

    Returns the matching description, "Invalid Code" when the value is not
    in the tag's table, or "Invalid Tag" when the tag has no table.
    (Previously every call eagerly evaluated a lookup in all 14 tables just
    to pick one result; now a single dispatch selects the table first.)
    """
    well_status_codes = {
        0: "Running",
        1: "Pumped Off",
        2: "Alarmed",
        3: "Locked Out",
        4: "Stopped"
    }
    pid_control_codes = {
        0: "Flow",
        1: "Fluid Level",
        2: "Tubing Pressure",
        3: "Manual"
    }
    downhole_codes = {
        0: "OK",
        1: "Connecting",
        2: "Open Circuit",
        3: "Shorted",
        4: "Cannot Decode"
    }
    permissive_codes = {
        0: "OK",
        1: "Flow",
        2: "Intake Pressure",
        3: "Intake Temperature",
        4: "Tubing Pressure",
        5: "VFD",
        6: "Fluid Level",
        7: "Min. Downtime"
    }
    alarm_codes = {
        0: "OK",
        1: "Alarm"
    }
    alarm_vfd_codes = {
        0: "OK",
        1: "Locked Out"
    }
    vfd_fault_codes = {
        0: "No Fault",
        2: "Auxiliary Input",
        3: "Power Loss",
        4: "UnderVoltage",
        5: "OverVoltage",
        7: "Motor Overload",
        8: "Heatsink OverTemp",
        9: "Thermister OverTemp",
        10: "Dynamic Brake OverTemp",
        12: "Hardware OverCurrent",
        13: "Ground Fault",
        14: "Ground Warning",
        15: "Load Loss",
        17: "Input Phase Loss",
        18: "Motor PTC Trip",
        19: "Task Overrun",
        20: "Torque Prove Speed Band",
        21: "Output Phase Loss",
        24: "Decel Inhibit",
        25: "OverSpeed Limit",
        26: "Brake Slipped",
        27: "Torque Prove Conflict",
        28: "TP Encls Confict",
        29: "Analog In Loss",
        33: "Auto Restarts Exhausted",
        35: "IPM OverCurrent",
        36: "SW OverCurrent",
        38: "Phase U to Ground",
        39: "Phase V to Ground",
        40: "Phase W to Ground",
        41: "Phase UV Short",
        42: "Phase VW Short",
        43: "Phase WU Short",
        44: "Phase UNeg to Ground",
        45: "Phase VNeg to Ground",
        46: "Phase WNeg to Ground",
        48: "System Defaulted",
        49: "Drive Powerup",
        51: "Clear Fault Queue",
        55: "Control Board Overtemp",
        59: "Invalid Code",
        61: "Shear Pin 1",
        62: "Shear Pin 2",
        64: "Drive Overload",
        66: "OW Torque Level",
        67: "Pump Off",
        71: "Port 1 Adapter",
        72: "Port 2 Adapter",
        73: "Port 3 Adapter",
        74: "Port 4 Adapter",
        75: "Port 5 Adapter",
        76: "Port 6 Adapter",
        77: "IR Volts Range",
        78: "FluxAmps Ref Range",
        79: "Excessive Load",
        80: "AutoTune Aborted",
        81: "Port 1 DPI Loss",
        82: "Port 2 DPI Loss",
        83: "Port 3 DPI Loss",
        84: "Port 4 DPI Loss",
        85: "Port 5 DPI Loss",
        86: "Port 6 DPI Loss",
        87: "IXo Voltage Range",
        91: "Primary Velocity Feedback Loss",
        93: "Hardware Enable Check",
        94: "Alternate Velocity Feedback Loss",
        95: "Auxiliary Velocity Feedback Loss",
        96: "Position Feedback Loss",
        97: "Auto Tach Switch",
        100: "Parameter Checksum",
        101: "Power Down NVS Blank",
        102: "NVS Not Blank",
        103: "Power Down NVS Incompatible",
        104: "Power Board Checksum",
        106: "Incompat MCB-PB",
        107: "Replaced MCB-PB",
        108: "Analog Calibration Checksum",
        110: "Invalid Power Board Data",
        111: "Power Board Invalid ID",
        112: "Power Board App Min Version",
        113: "Tracking DataError",
        115: "Power Down Table Full",
        116: "Power Down Entry Too Large",
        117: "Power Down Data Checksum",
        118: "Power Board Power Down Checksum",
        124: "App ID Changed",
        125: "Using Backup App",
        134: "Start on Power Up",
        137: "External Precharge Error",
        138: "Precharge Open",
        141: "Autotune Enc Angle",
        142: "Autotune Speed Restricted",
        143: "Autotune Current Regulator",
        144: "Autotune Inertia",
        145: "Autotune Travel",
        13035: "Net IO Timeout",
        13037: "Net IO Timeout"
    }
    # Which code table decodes each tag (several tags share a table).
    tag_tables = {
        "wellstatus": well_status_codes,
        "pidcontrolmode": pid_control_codes,
        "downholesensorstatus": downhole_codes,
        "alarmflowrate": alarm_codes,
        "alarmintakepressure": alarm_codes,
        "alarmintaketemperature": alarm_codes,
        "alarmtubingpressure": alarm_codes,
        "alarmvfd": alarm_codes,
        "alarmlockout": alarm_vfd_codes,
        "alarmfluidlevel": alarm_codes,
        "runpermissive": permissive_codes,
        "startpermissive": permissive_codes,
        "last_vfd_fault_code": vfd_fault_codes,
        "vfd_fault": vfd_fault_codes,
    }
    table = tag_tables.get(plc_tag)
    if table is None:
        return "Invalid Tag"
    return table.get(value, "Invalid Code")

View File

@@ -697,66 +697,6 @@
"gain": "",
"offset": ""
},
{
"name": "hartnettotal",
"ctrlName": "advvfdipp",
"group": "default",
"uploadType": "periodic",
"dataType": "FLOAT",
"addr": "in_HART_Flowmeter_Net",
"decimal": 2,
"len": 1,
"readWrite": "ro",
"unit": "",
"desc": "",
"transformType": 0,
"maxValue": "",
"minValue": "",
"maxScaleValue": "",
"minScaleValue": "",
"gain": "",
"offset": ""
},
{
"name": "hartfwdtotal",
"ctrlName": "advvfdipp",
"group": "default",
"uploadType": "periodic",
"dataType": "FLOAT",
"addr": "in_HART_Flowmeter_Fwd",
"decimal": 2,
"len": 1,
"readWrite": "ro",
"unit": "",
"desc": "",
"transformType": 0,
"maxValue": "",
"minValue": "",
"maxScaleValue": "",
"minScaleValue": "",
"gain": "",
"offset": ""
},
{
"name": "hartrevtotal",
"ctrlName": "advvfdipp",
"group": "default",
"uploadType": "periodic",
"dataType": "FLOAT",
"addr": "in_HART_Flowmeter_Rev",
"decimal": 2,
"len": 1,
"readWrite": "ro",
"unit": "",
"desc": "",
"transformType": 0,
"maxValue": "",
"minValue": "",
"maxScaleValue": "",
"minScaleValue": "",
"gain": "",
"offset": ""
},
{
"ctrlName": "advvfdipp",
"dataType": "BIT",

File diff suppressed because one or more lines are too long

View File

@@ -6,9 +6,10 @@ from quickfaas.remotebus import publish
from quickfaas.global_dict import get as get_params
from quickfaas.global_dict import _set_global_args
def reboot():
def reboot(reason="Rebooting for config file update"):
#basic = Basic()
logger.info("!" * 10 + "REBOOTING DEVICE" + "!"*10)
logger.info(reason)
r = os.popen("kill -s SIGHUP `cat /var/run/python/supervisord.pid`").read()
logger.info(f"REBOOT : {r}")
@@ -126,6 +127,8 @@ def sendData(message):
for measure in message["measures"]:
try:
logger.debug(measure)
if abs(payload["ts"]/1000 - measure["timestamp"]) > 7200:
reboot(reason="Poll timestamp and actual timestamp out of sync. Actual: {} Poll: {}".format(payload["ts"]/1000,measure["timestamp"]))
if measure["name"] in ["wellstatus","pidcontrolmode","downholesensorstatus","alarmflowrate","alarmintakepressure","alarmintaketemperature","alarmtubingpressure","alarmvfd","alarmlockout","alarmfluidlevel","runpermissive","startpermissive","last_vfd_fault_code","vfd_fault", "flowmeter_fault"]:
logger.debug("Converting DINT/BOOL to STRING")
value = convert_int(measure["name"], measure["value"])

File diff suppressed because one or more lines are too long

View File

@@ -2,7 +2,7 @@
"cells": [
{
"cell_type": "code",
"execution_count": 7,
"execution_count": 1,
"metadata": {},
"outputs": [],
"source": [
@@ -12,15 +12,15 @@
},
{
"cell_type": "code",
"execution_count": 8,
"execution_count": 2,
"metadata": {},
"outputs": [],
"source": [
"root = \"/Users/nico/Documents/GitHub/HP_InHand_IG502/Pub_Sub/\"\n",
"devicetype = \"dual_flowmeter\"\n",
"platform = \"mistaway\" #\"thingsboard\"\n",
"root = \"/Users/nico/Documents/GitHub/HP_InHand_IG502/Pub_Sub\"\n",
"devicetype = \"advvfdipp\"\n",
"platform = \"thingsboard\" #\"mistaway\"\n",
"platform_short = \"tb\" if platform == \"thingsboard\" else \"ma\" \n",
"startversion = 0\n",
"startversion = 2\n",
"deviceconfig = devicetype + \"_\" + platform_short + \"_\" +\"v\" + str(startversion) + \".cfg\"\n"
]
},
@@ -37,7 +37,7 @@
},
{
"cell_type": "code",
"execution_count": 10,
"execution_count": 3,
"metadata": {},
"outputs": [],
"source": [
@@ -65,7 +65,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.10.5"
"version": "3.10.5 | packaged by conda-forge | (main, Jun 14 2022, 07:05:37) [Clang 13.0.1 ]"
},
"orig_nbformat": 4,
"vscode": {

View File

@@ -37,10 +37,12 @@ def write_config(path, config, pubDir, subDir):
checkFileExist(path)
with open(path, "w") as f:
if pubDir:
checkFolderExist(pubDir)
with os.scandir(pubDir) as it:
for ind, entry in enumerate(it):
config["quickfaas"]["uploadFuncs"][ind]["script"] = code_to_string(entry.path)
if subDir:
checkFolderExist(subDir)
with os.scandir(subDir) as it:
for ind, entry in enumerate(it):
config["quickfaas"]["downloadFuncs"][ind]["script"] = code_to_string(entry.path)
@@ -55,4 +57,8 @@ def checkFileExist(path):
os.makedirs("/".join(path.split("/")[:-1]))
open(path, "a").close()
if not os.path.exists(path):
open(path, "a").close()
open(path, "a").close()
def checkFolderExist(path):
    """Ensure the directory *path* exists, creating it (with parents) when missing."""
    if os.path.exists(path):
        return
    os.makedirs(path)

307
default w reboot.cnf Normal file
View File

@@ -0,0 +1,307 @@
!
#system config
language English
hostname EdgeGateway
ip domain-name edgegateway.com.cn
clock timezone CST6CDT,M3.2.0/2,M11.1.0/2
service password-encryption
!
#log config
log persistent severity 7
!
#user config
username admin privilege 15 password $1$4uCjyOgw$HNQuju4dv/YWzZ/S7hgbj0
!
enable password $1$kvwao0Sc$zzKGmDaqupdkXYaaObwJx/
!
#aaa config
!
#cron config
chronos reboot every day 3 5
remote-login retry 20
!
#alarm config
!
#loopback config
interface loopback 1
ip address 127.0.0.1 255.0.0.0
!
!
#ethernet interface config
interface fastethernet 0/1
track l2-state
ip address dhcp
!
interface fastethernet 0/2
!
!
#Ethernet sub interface config
!
#cellular config
cellular 1 gsm profile 1 11166.mcs use-blank-dialno auto
cellular 1 gsm profile 2 ne01.vzwstatic use-blank-dialno auto
cellular 1 gsm profile 3 10569.mcs use-blank-dialno auto
cellular 1 dial interval 10
cellular 1 signal interval 120
cellular 1 network auto
cellular 1 dual-sim enable
cellular 1 dual-sim main 1
cellular 1 dual-sim policy redial 5
cellular 1 sms mode text
cellular 1 sms interval 30
!
interface cellular 1
dialer profile 1
dialer profile 2 secondary
dialer timeout 120
dialer activate auto
ip address negotiated
ip mru 1500
ip mtu 1500
ppp ipcp dns request
ppp keepalive 55 5
!
!
#dot11 config
interface dot11radio 1
ip address dhcp
station-role workgroup-bridge
radio-type dot11bgn
client-mode ssid Henry@s#Pump
client-mode authentication-method wpa 2
client-mode cipher-suite ccmp key ascii $AES$861CE7AACCECC4CAF7EFD9A308A655E3
no client-mode default-route
!
!
#bridge config
!
bridge 1
!
interface bridge 1
ip address 192.168.1.1 255.255.255.0
!
interface fastethernet 0/2
bridge-group 1
!
!
!
#dialer config
!
#ipsec config
crypto ipsec-daemon stop
!
#l2tp config
!
#openvpn config
!
#openvpn server config
!
#web config
ip web-access timeout 300
ip https access enable
!
#device-manager config
device-manager enable
device-manager server iot.inhandnetworks.com
device-manager account nmelone@henry-pump.com
device-manager location cellular
!
#InConnect config
!
#Erlang config
erlang mode off
!
#telnet server config
no ip telnet server
no ip telnet access enable
!
#ssh server config
!
#dhcp-relay config
!
#dns-relay config
ip dns-relay server
!
#dhcp server config
interface bridge 1
ip dhcp-server enable
ip dhcp-server range 192.168.1.2 192.168.1.254
ip dhcp-server lease 1440
!
!
#dns config
!
#ip host config
!
#ddns config
!
#snmp config
#ovdp config
!
!
#dockerd config
!
#portainer config
!
#Azure IoT Edge config
!
#AWS IoT Greengrass config
!
#ntp server config
ntp master 2
ntp server 0.pool.ntp.org
ntp server 1.pool.ntp.org
ntp server 2.pool.ntp.org
ntp server 3.pool.ntp.org
!
!
#Telegraf config
!
#email config
!
#sntp client config
sntp-client
sntp-client server 0.pool.ntp.org port 123
sntp-client server 1.pool.ntp.org port 123
sntp-client server 2.pool.ntp.org port 123
sntp-client server 3.pool.ntp.org port 123
!
#gre config
!
#static route config
ip route 0.0.0.0 0.0.0.0 cellular 1 253
ip route 0.0.0.0 0.0.0.0 dot11radio 1 250
ip route 0.0.0.0 0.0.0.0 fastethernet 0/1 255
!
#rip config
!
!
#ospf config
!
!
#filtering config
!
#bgp config
!
!
#firewall config
!
!
interface cellular 1
ip access-group 192 admin
!
access-list 100 10 permit ip any any
access-list 101 10 permit ip any any
access-list 102 10 permit ip any any
access-list 192 10 permit tcp any any eq 443 log
access-list 192 20 deny tcp any any eq 80
access-list 192 30 deny tcp any any eq 23
access-list 192 40 deny tcp any any eq 22
access-list 192 50 deny tcp any any eq 53
access-list 192 60 deny udp any any eq 53
interface cellular 1
ip nat outside
!
interface bridge 1
ip nat inside
!
interface fastethernet 0/1
ip nat outside
!
interface dot11radio 1
ip nat outside
!
!
ip snat inside list 101 interface fastethernet 0/1
ip snat inside list 100 interface cellular 1
ip snat inside list 102 interface dot11radio 1
ip dnat outside static tcp interface cellular 1 44818 192.168.1.10 44818 description PLC
ip dnat outside static tcp interface cellular 1 5900 192.168.1.11 5900 description VNC
!
!
!
!
!
#tcp mss config
ip tcp adjust-mss 1360
!
!
!
#netwatcher config
!
!
#sla config
!
#track config
!
#vrrpd config
!
#backup config
!
#mroute config
!
#cert config
!
!
#cert enroll config
crypto key generate rsa general-keys modulus 1024
!
!
#dls config
!
#gps config
gps enable
!
#gps serial config
!
#gps server config
!
#gps client config
!
#Serial Configuration
serial 1
!
serial 2
!
!
#python config
python enable
python log username adm password $AES$BFA541FA10FA3B041CBA4412D12C52B8
python app 1 on
python appcmd 1 logsize 1
quit
!
python app 2 on
python appcmd 2 logsize 1 2
device_supervisor
quit
!
!
#Modbus IO Configuration
!
#traffic-stated config
interface cellular 1
traffic-stated statistic
!
!
#data-usage config
data-usage sim1
daily units MB
daily limit 35
monthly units GB
monthly limit 1
!
interface cellular 1
traffic-stated sim 1 data-usage sim1
traffic-stated sim 1 daily-action only-report
traffic-stated sim 1 monthly-action only-report
traffic-stated sim 1 monitoring
!
#end of configuration