updates 2024-07-31
This commit is contained in:
73
code snippets/alarmList.ipynb
Normal file
73
code snippets/alarmList.ipynb
Normal file
@@ -0,0 +1,73 @@
|
||||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 1,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import json\n",
|
||||
"from openpyxl import Workbook\n",
|
||||
"\n",
|
||||
"# Load the JSON data\n",
|
||||
"with open('/Users/nico/Documents/GitHub/thingsboard_vc/device_profile/836174d0-44f2-11ed-b441-bfcc3466332b.json') as f:\n",
|
||||
" data = json.load(f)\n",
|
||||
"\n",
|
||||
"# Create a new Excel workbook\n",
|
||||
"wb = Workbook()\n",
|
||||
"ws = wb.active\n",
|
||||
"\n",
|
||||
"# Set the column headers\n",
|
||||
"ws['A1'] = 'Alarm Type'\n",
|
||||
"ws['B1'] = 'PLC Tag'\n",
|
||||
"ws['C1'] = 'Priority'\n",
|
||||
"ws['D1'] = 'Output'\n",
|
||||
"\n",
|
||||
"# Iterate through the alarms and create rows in the spreadsheet\n",
|
||||
"row_num = 2\n",
|
||||
"for alarm in data['entity']['profileData']['alarms']:\n",
|
||||
" alarm_type = alarm['alarmType']\n",
|
||||
" plc_tag = alarm['clearRule']['condition']['condition'][0]['key']['key']\n",
|
||||
" priority_key = next(iter(alarm['createRules']))\n",
|
||||
" priority = priority_key.upper()\n",
|
||||
" output_map = {\n",
|
||||
" 'CRITICAL': 'Voice, SMS, Email',\n",
|
||||
" 'MAJOR': 'SMS, Email',\n",
|
||||
" 'MINOR': 'Email'\n",
|
||||
" }\n",
|
||||
" output = output_map.get(priority, '')\n",
|
||||
"\n",
|
||||
" ws[f'A{row_num}'] = alarm_type\n",
|
||||
" ws[f'B{row_num}'] = plc_tag\n",
|
||||
" ws[f'C{row_num}'] = priority\n",
|
||||
" ws[f'D{row_num}'] = output\n",
|
||||
"\n",
|
||||
" row_num += 1\n",
|
||||
"\n",
|
||||
"# Save the Excel file\n",
|
||||
"wb.save('/Users/nico/Desktop/HR Tank Battery Alarms.xlsx')"
|
||||
]
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "xlxs",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.1.-1"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 2
|
||||
}
|
||||
77
code snippets/base_receiveCommand.py
Normal file
77
code snippets/base_receiveCommand.py
Normal file
@@ -0,0 +1,77 @@
|
||||
import json, time
|
||||
from datetime import datetime as dt
|
||||
from quickfaas.measure import recall, write
|
||||
from quickfaas.remotebus import publish
|
||||
from common.Logger import logger
|
||||
|
||||
# Helper function to split the payload into chunks
def chunk_payload(payload, chunk_size=20):
    """Yield telemetry messages carrying at most *chunk_size* values each.

    Every yielded dict keeps the original "ts" timestamp and holds a
    consecutive slice of the "values" mapping. An empty "values" dict
    yields nothing.
    """
    pairs = list(payload["values"].items())
    ts = payload["ts"]
    start = 0
    while start < len(pairs):
        yield {"ts": ts, "values": dict(pairs[start:start + chunk_size])}
        start += chunk_size
|
||||
|
||||
def sync():
    """Read all current controller measures and publish them as telemetry.

    Builds one payload stamped with the current time (epoch milliseconds),
    flattens every controller's measures into the "values" dict, and
    publishes it in chunks of 20 values with a 2-second pause between
    chunks (rate limiting on the remote bus).

    Bug fixed: if recall() raised, the original fell through and hit a
    NameError on the undefined `data` -- now it logs and returns early.
    """
    payload = {"ts": round(dt.timestamp(dt.now())) * 1000, "values": {}}
    topic = "v1/devices/me/telemetry"
    try:
        data = recall()
    except Exception as e:
        logger.error(e)
        return  # nothing to publish
    logger.debug(data)
    for controller in data:
        for measure in controller["measures"]:
            # Flatten: measure name -> value across all controllers
            payload["values"][measure["name"]] = measure["value"]
    logger.debug("Sending on topic: {}".format(topic))
    logger.debug("Sending value: {}".format(payload))
    for chunk in chunk_payload(payload=payload):
        publish(topic, json.dumps(chunk), 1)
        time.sleep(2)
|
||||
def writeplctag(value, controller="advvfdipp"):
    """Write a single measurement value to the PLC (best-effort).

    value -- dict of the form {"measurement": <measurement_name>,
             "value": <value to write>}
    controller -- controller the measure belongs to; generalized from the
                  previously hard-coded "advvfdipp" (default preserved).

    Returns True on success, False on any failure; never raises.
    """
    try:
        logger.debug(value)
        # write() payload format:
        # [{"name": <controller>, "measures": [{"name": ..., "value": ...}]}]
        message = [{"name": controller, "measures": [{"name": value["measurement"], "value": value["value"]}]}]
        resp = write(message)
        logger.debug("RETURN FROM WRITE: {}".format(resp))
        return True
    except Exception as e:
        # Deliberate best-effort: report the failure instead of raising,
        # but log it at error (not debug) level so it is visible.
        logger.error(e)
        return False
|
||||
|
||||
def receiveCommand(topic, payload):
    """Handle an incoming RPC request from the remote bus.

    topic -- request topic; its last segment is the RPC request id.
    payload -- JSON string with at least {"method": ..., "params": ...}.

    Dispatches "setPLCTag" to writeplctag, then always acknowledges the
    request, waits 5 seconds (lets the PLC settle) and pushes fresh
    telemetry via sync(). Errors are logged, never raised.

    Cleanup: the original built a {"sync", "writeplctag"} dispatch dict it
    never consulted (it compared against "setPLCTag" instead) and parsed
    the JSON payload twice; the table now matches the actual RPC method.
    """
    try:
        logger.debug(topic)
        p = json.loads(payload)
        logger.debug(p)
        command = p["method"]
        # Dispatch table: RPC method name -> handler
        handlers = {
            "setPLCTag": writeplctag,
        }
        if command in handlers:
            try:
                result = handlers[command](p["params"])
                logger.debug(result)
            except Exception as e:
                logger.error(e)
        # Always ack with the request id (last topic segment), then resync
        ack(topic.split("/")[-1])
        time.sleep(5)
        sync()
    except Exception as e:
        logger.debug(e)
|
||||
|
||||
|
||||
def ack(msgid):
    """Publish an (empty-bodied) RPC response for request id *msgid*.

    The response carries only the current time; metadata and msgType are
    intentionally blank.
    """
    response_topic = "v1/devices/me/rpc/response/" + str(msgid)
    body = {"msg": {"time": time.time()}, "metadata": "", "msgType": ""}
    publish(response_topic, json.dumps(body), 1)
|
||||
1267
code snippets/datapoints.json
Normal file
1267
code snippets/datapoints.json
Normal file
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
18
code snippets/mobotix_example.py
Normal file
18
code snippets/mobotix_example.py
Normal file
@@ -0,0 +1,18 @@
|
||||
from requests.adapters import HTTPAdapter, Retry
from requests.auth import HTTPDigestAuth, HTTPBasicAuth
from requests.exceptions import ConnectionError, RetryError
import time, requests

# Fetch a single JPEG snapshot from a Mobotix camera and stream it to disk.
# SECURITY NOTE(review): credentials and camera IP are hard-coded below --
# move them to configuration / environment variables.
with open('./snapshot.jpg', 'wb') as handle:
    with requests.Session() as s:
        # Retry transient HTTP failures before giving up.
        retries = Retry(total=3, backoff_factor=0.1, status_forcelist=[404, 408, 500, 502, 503, 504])
        s.mount('http://', HTTPAdapter(max_retries=retries))
        resp = ""
        try:
            resp = s.get("http://192.168.1.97/cgi-bin/jpg/image.cgi?id=2.1212121212", auth=HTTPDigestAuth("admin", "Amerus@1903"), stream=True)
        except (RetryError, ConnectionError) as m:
            # Bug fixed: ConnectionError was imported but never caught, so an
            # unreachable camera crashed the script instead of being reported.
            print(m)
        if resp:
            # Stream the image body to disk in 1 KiB chunks.
            for block in resp.iter_content(1024):
                if not block:
                    break
                handle.write(block)
|
||||
459
code snippets/rpiSimDevice.ipynb
Normal file
459
code snippets/rpiSimDevice.ipynb
Normal file
File diff suppressed because one or more lines are too long
129
code snippets/rpiSimDevice.py
Normal file
129
code snippets/rpiSimDevice.py
Normal file
@@ -0,0 +1,129 @@
|
||||
import paho.mqtt.client as mqtt
|
||||
import json
|
||||
import time
|
||||
import math
|
||||
import random
|
||||
# MQTT Broker settings
MQTT_BROKER = "hp.henrypump.cloud"
MQTT_TOPIC = "v1/devices/me/telemetry"

# Data simulation settings
DATA_INTERVAL = 10 * 60 # 10 minutes
#SINE_WAVE_PERIOD = 3600 # 1 hour
#RANDOM_MIN = 0
#RANDOM_MAX = 100
# How long a simulated shutdown lasts once triggered (seconds)
SHUTDOWN_DURATION = 30 * 60 # 30 minutes

# Load data points configuration from JSON file
# NOTE(review): absolute user-specific path -- breaks on any other machine;
# consider a path relative to this script.
with open('/Users/nico/Documents/GitHub/HP_InHand_IG502/code snippets/datapoints.json') as f:
    datapoints_config = json.load(f)

# Shutdown state variables
shutdown_active = False    # True while a simulated shutdown is in progress
shutdown_start_time = 0    # epoch seconds when the current shutdown began

# MQTT Client setup
client = mqtt.Client(client_id="rpiSim")
# presumably the device access token passed as the username -- confirm
client.username_pw_set(username="henrypumptest2")
client.connect(MQTT_BROKER)
|
||||
def reconnect_to_broker():
    """Re-establish the MQTT connection if it has dropped (best-effort).

    No-op while connected; on a failed reconnect, logs the error and
    backs off for one second.
    """
    if client.is_connected():
        return
    try:
        client.reconnect()
    except Exception as e:
        print(f"Error reconnecting to broker: {e}")
        time.sleep(1)
|
||||
def round_to_nearest_ten_minutes(ts):
    """Round a millisecond epoch timestamp to the nearest 10-minute boundary.

    ts -- timestamp in milliseconds.
    Returns the multiple of 600,000 ms closest to *ts*; exact midpoints
    round up.

    Bug fixed: the previous expression
    `ts - (ts % 600000) + 600000 * ((ts // 600000) % 10)` did not round at
    all (e.g. it mapped 600000 to 1200000).
    """
    period = 600_000  # ten minutes in milliseconds
    return ((ts + period // 2) // period) * period
|
||||
"""
|
||||
def generate_sine_wave_value(t, period, amplitude=1, offset=0):
|
||||
return amplitude * math.sin(2 * math.pi * t / period) + offset
|
||||
"""
|
||||
def generate_sine_wave_value(datapoint):
|
||||
amplitude = datapoints_config[datapoint]['amplitude']
|
||||
frequency = datapoints_config[datapoint]['frequency']
|
||||
phase = random.uniform(0, .5)#datapoints_config[datapoint]['phase']
|
||||
offset = datapoints_config[datapoint]['offset']
|
||||
value = amplitude * math.sin((time.time() * frequency) + phase) + offset
|
||||
if shutdown_active:
|
||||
return max(0, value - (time.time() - shutdown_start_time) / 60)
|
||||
else:
|
||||
return value
|
||||
"""
|
||||
def generate_random_value(min_val, max_val):
|
||||
return random.uniform(min_val, max_val)
|
||||
"""
|
||||
def generate_random_value(datapoint):
|
||||
min_val = datapoints_config[datapoint]['min']
|
||||
max_val = datapoints_config[datapoint]['max']
|
||||
value = random.uniform(min_val, max_val)
|
||||
if shutdown_active:
|
||||
return max(0, value - (time.time() - shutdown_start_time) / 60)
|
||||
else:
|
||||
return value
|
||||
def constant_value(datapoint):
    """Return the fixed configured "value" for a constant-type *datapoint*."""
    return datapoints_config[datapoint]["value"]
|
||||
def get_shutdown_state():
    """Advance the simulated-shutdown state machine; report whether active.

    While a shutdown is active: return True until SHUTDOWN_DURATION has
    elapsed, then clear the flag. Otherwise there is a 1% chance per call
    of starting a new shutdown; that triggering call still returns False
    (unchanged -- the generators read shutdown_active directly).

    Bug fixed: the expiry branch previously fell through and returned
    None; every path now returns an explicit bool.
    """
    global shutdown_active
    global shutdown_start_time
    if shutdown_active:
        if time.time() - shutdown_start_time > SHUTDOWN_DURATION:
            # Shutdown has run its course; clear the flag.
            shutdown_active = False
            return False
        return True
    if random.random() < 0.01:  # 1% chance of entering shutdown state
        shutdown_active = True
        shutdown_start_time = time.time()
    return False
|
||||
"""def get_datapoint_value(key, value_type):
|
||||
global shutdown_active
|
||||
if shutdown_active:
|
||||
if key == "value1":
|
||||
return max(0, value_type - (time.time() - shutdown_start_time) / 60)
|
||||
elif key == "value2":
|
||||
return False
|
||||
else:
|
||||
return 0
|
||||
else:
|
||||
if key == "value1":
|
||||
return generate_sine_wave_value(time.time(), SINE_WAVE_PERIOD)
|
||||
elif key == "value2":
|
||||
return generate_random_value(0, 1) > 0.5
|
||||
else:
|
||||
return generate_random_value(RANDOM_MIN, RANDOM_MAX)
|
||||
"""
|
||||
# Helper function to split the payload into chunks
def chunk_payload(payload, chunk_size=20):
    """Split *payload* into telemetry messages of at most *chunk_size* values.

    Each yielded dict reuses the payload's "ts" and carries a consecutive
    slice of its "values"; an empty "values" dict yields nothing.
    """
    ts = payload["ts"]
    pairs = list(payload["values"].items())
    for start in range(0, len(pairs), chunk_size):
        yield {"ts": ts, "values": dict(pairs[start:start + chunk_size])}
|
||||
# Main simulation loop: poll the clock once per second; on every 10-minute
# boundary generate one sample per configured datapoint and publish the
# batch over MQTT in chunks.
while True:
    now = int(time.time())
    if now % 600 == 0:  # top of a 10-minute interval
        ts = now * 1000  # telemetry timestamp in milliseconds
        get_shutdown_state()  # advance the shutdown state machine
        # Removed: a commented-out `values = {...}` block from the old
        # get_datapoint_value prototype.
        values = {}
        for datapoint in datapoints_config:
            kind = datapoints_config[datapoint]['type']  # hoisted lookup
            if kind == 'sine_wave':
                values[datapoint] = generate_sine_wave_value(datapoint)
            elif kind == 'random':
                values[datapoint] = generate_random_value(datapoint)
            elif kind == 'constant':
                values[datapoint] = constant_value(datapoint)
        payload = {"ts": ts, "values": values}
        # Reconnect to broker if disconnected
        reconnect_to_broker()
        try:
            for chunk in chunk_payload(payload=payload):
                client.publish(MQTT_TOPIC, json.dumps(chunk))
                time.sleep(2)  # pace the chunks
            print(f"Published data at {ts}: {payload}")
        except Exception as e:
            print(f"ERROR in Publish: {e}")
    time.sleep(1)
|
||||
Reference in New Issue
Block a user