added flowmeterskid and started reports

This commit is contained in:
Nico Melone
2023-08-01 15:41:29 -05:00
parent 58415dfdd7
commit 70b2f7c763
32 changed files with 20292 additions and 28 deletions

BIN
.DS_Store vendored

Binary file not shown.

3
.gitignore vendored Normal file
View File

@@ -0,0 +1,3 @@
.DS_Store
tb_report/frontend/node_modules/**

Binary file not shown.

File diff suppressed because one or more lines are too long

Binary file not shown.

View File

@@ -0,0 +1,299 @@
"""Define Meshify channel class."""
import time
from pycomm.ab_comm.clx import Driver as ClxDriver
from pycomm.cip.cip_base import CommError, DataError
from file_logger import filelogger as log
import minimalmodbus
minimalmodbus.BAUDRATE = 9600
minimalmodbus.STOPBITS = 1
TAG_DATAERROR_SLEEPTIME = 5
def binarray(intval):
    """Split an integer into its bits, least-significant bit first.

    The value is formatted to a minimum of 8 binary digits, so small
    integers always yield at least 8 entries.

    :param intval: non-negative integer to decompose.
    :return: list of '0'/'1' characters, LSB first.
    """
    bit_string = '{0:08b}'.format(intval)
    return list(reversed(bit_string))
def read_tag(addr, tag, plc_type="CLX"):
    """Read a tag from the PLC.

    :param addr: IP address of the PLC.
    :param tag: name of the tag to read.
    :param plc_type: "CLX" or "Micro800"; Micro800 needs a direct connection.
    :return: pycomm's read result (callers index [0] for the value and
        [1] for the data type) on success, False on any failure.
    """
    # Micro800 PLCs require a direct (non-routed) connection.
    direct = plc_type == "Micro800"
    clx = ClxDriver()
    try:
        if clx.open(addr, direct_connection=direct):
            try:
                val = clx.read_tag(tag)
                clx.close()
                return val
            except DataError as err:
                clx.close()
                # Back off briefly so a misbehaving PLC is not hammered.
                time.sleep(TAG_DATAERROR_SLEEPTIME)
                log.error("Data Error during readTag({}, {}): {}".format(addr, tag, err))
    except CommError:
        # err = c.get_status()
        log.error("Could not connect during readTag({}, {})".format(addr, tag))
    except AttributeError as err:
        clx.close()
        log.error("AttributeError during readTag({}, {}): \n{}".format(addr, tag, err))
    # NOTE(review): on the error paths above this is a second close();
    # pycomm appears to tolerate that, but confirm.
    clx.close()
    return False
def read_array(addr, tag, start, end, plc_type="CLX"):
    """Read a slice of a PLC array tag, one element at a time.

    Reads ``tag[start]`` through ``tag[end - 1]`` and returns the values
    rounded to 4 decimal places.

    :param addr: IP address of the PLC.
    :param tag: base name of the array tag.
    :param start: first element index (inclusive).
    :param end: last element index (exclusive).
    :param plc_type: "CLX" or "Micro800"; Micro800 needs a direct connection.
    :return: list of rounded values on success, False on any failure.
    """
    direct = plc_type == "Micro800"
    clx = ClxDriver()
    if not clx.open(addr, direct_connection=direct):
        # BUG FIX: the original silently returned None when the connection
        # failed; log it and return False like the other helpers.
        log.error("Could not connect during readArray({}, {})".format(addr, tag))
        return False
    try:
        arr_vals = []
        for i in range(start, end):
            val = clx.read_tag("{}[{}]".format(tag, i))
            arr_vals.append(round(val[0], 4))
        if arr_vals:
            return arr_vals
        log.error("No length for {}".format(addr))
        return False
    except Exception:
        log.error("Error during readArray({}, {}, {}, {})".format(addr, tag, start, end))
        log.error(clx.get_status())
        # BUG FIX: the original fell through and returned None here.
        return False
    finally:
        # Close exactly once on every path (the original double-closed on
        # the exception path and leaked nothing but was inconsistent).
        clx.close()
def write_tag(addr, tag, val, plc_type="CLX"):
    """Write a tag value to the PLC.

    :param addr: IP address of the PLC.
    :param tag: name of the tag to write.
    :param val: value to write.
    :param plc_type: "CLX" or "Micro800"; Micro800 needs a direct connection.
    :return: pycomm's write status on success, False on any failure.
    """
    direct = plc_type == "Micro800"
    clx = ClxDriver()
    try:
        if clx.open(addr, direct_connection=direct):
            try:
                # Read first so the write can reuse the tag's data type
                # (element [1] of pycomm's read result).
                initial_val = clx.read_tag(tag)
                write_status = clx.write_tag(tag, val, initial_val[1])
                clx.close()
                return write_status
            except DataError as err:
                clx_err = clx.get_status()
                clx.close()
                log.error("--\nDataError during writeTag({}, {}, {}, plc_type={}) -- {}\n{}\n".format(addr, tag, val, plc_type, err, clx_err))
    except CommError as err:
        # NOTE(review): clx_err is fetched here but not included in the
        # log message below -- confirm whether that was intended.
        clx_err = clx.get_status()
        log.error("--\nCommError during write_tag({}, {}, {}, plc_type={})\n{}\n--".format(addr, tag, val, plc_type, err))
    clx.close()
    return False
class Channel(object):
    """Holds the configuration for a Meshify channel.

    A channel remembers the last value it reported and decides, via
    check(), whether a freshly-read value needs to be sent again:
    on first read, on value change (or change beyond a threshold for
    numeric types), when the guarantee interval has elapsed, or when
    a send is forced.
    """

    def __init__(self, mesh_name, data_type, chg_threshold, guarantee_sec, map_=False, write_enabled=False):
        """Initialize the channel.

        :param mesh_name: name the value is published under.
        :param data_type: 'BOOL', 'STRING', or a numeric type name.
        :param chg_threshold: numeric change needed to trigger a send.
        :param guarantee_sec: max seconds between sends regardless of change.
        :param map_: optional dict mapping raw values to published values.
        :param write_enabled: whether writes to this channel are allowed.
        """
        self.mesh_name = mesh_name
        self.data_type = data_type
        self.last_value = None
        self.value = None
        self.last_send_time = 0
        self.chg_threshold = chg_threshold
        self.guarantee_sec = guarantee_sec
        self.map_ = map_
        self.write_enabled = write_enabled

    def __str__(self):
        """Create a string for the channel."""
        return "{}\nvalue: {}, last_send_time: {}".format(self.mesh_name, self.value, self.last_send_time)

    def check(self, new_value, force_send=False):
        """Check to see if the new_value needs to be stored.

        When a send is due, updates self.value (mapped through map_ when
        configured), self.last_value and self.last_send_time.

        :param new_value: freshly-read raw value.
        :param force_send: force a send even without a change.
        :return: True when the value should be sent upstream.
        """
        send_needed = False
        send_reason = ""
        if self.data_type == 'BOOL' or self.data_type == 'STRING':
            # Discrete types: any change (of the mapped value) triggers a send.
            if self.last_send_time == 0:
                send_needed = True
                send_reason = "no send time"
            elif self.value is None:
                send_needed = True
                send_reason = "no value"
            elif self.value != new_value:
                if self.map_:
                    # self.value stores the mapped form, so compare against
                    # the mapping of the raw value.
                    # NOTE(review): an unmapped new_value raises KeyError
                    # here (only the assignment below is guarded) -- confirm.
                    if not self.value == self.map_[new_value]:
                        send_needed = True
                        send_reason = "value change"
                else:
                    send_needed = True
                    send_reason = "value change"
            elif (time.time() - self.last_send_time) > self.guarantee_sec:
                send_needed = True
                send_reason = "guarantee sec"
            elif force_send:
                send_needed = True
                send_reason = "forced"
        else:
            # Numeric types: send when the change exceeds the threshold.
            if self.last_send_time == 0:
                send_needed = True
                send_reason = "no send time"
            elif self.value is None:
                send_needed = True
                send_reason = "no value"
            elif abs(self.value - new_value) > self.chg_threshold:
                send_needed = True
                send_reason = "change threshold"
            elif (time.time() - self.last_send_time) > self.guarantee_sec:
                send_needed = True
                send_reason = "guarantee sec"
            elif force_send:
                send_needed = True
                send_reason = "forced"
        if send_needed:
            self.last_value = self.value
            if self.map_:
                try:
                    self.value = self.map_[new_value]
                except KeyError:
                    # Fall back to the raw value when no mapping exists.
                    log.error("Cannot find a map value for {} in {} for {}".format(new_value, self.map_, self.mesh_name))
                    self.value = new_value
            else:
                self.value = new_value
            self.last_send_time = time.time()
            log.info("Sending {} for {} - {}".format(self.value, self.mesh_name, send_reason))
        return send_needed

    def read(self):
        """Read the value.

        Base class stub: subclasses implement the actual read.
        """
        pass
def identity(sent):
    """Pass-through transform: return *sent* unchanged.

    Used as the default transform for Modbus channels.
    """
    return sent
class ModbusChannel(Channel):
    """Modbus channel object.

    A Channel whose value comes from a Modbus register; the raw register
    value is passed through transform_fn on read.
    """

    def __init__(self, mesh_name, register_number, data_type, chg_threshold, guarantee_sec, channel_size=1, map_=False, write_enabled=False, transform_fn=identity):
        """Initialize the channel.

        :param register_number: Modbus register address to read.
        :param channel_size: number of registers the value occupies.
        :param transform_fn: callable applied to the raw register value.
        Remaining parameters are documented on Channel.__init__.
        """
        super(ModbusChannel, self).__init__(mesh_name, data_type, chg_threshold, guarantee_sec, map_, write_enabled)
        # Channel.__init__ already stores the common fields (the original
        # redundantly re-assigned all of them here); only the
        # Modbus-specific attributes are set below.
        self.register_number = register_number
        self.channel_size = channel_size
        self.transform_fn = transform_fn

    def read(self, mbsvalue):
        """Return the transformed read value.

        :param mbsvalue: raw value read from the Modbus register.
        """
        return self.transform_fn(mbsvalue)
class PLCChannel(Channel):
    """PLC Channel Object.

    A Channel whose value is read from an Allen-Bradley PLC tag via
    the module-level read_tag helper.
    """

    def __init__(self, ip, mesh_name, plc_tag, data_type, chg_threshold, guarantee_sec, map_=False, write_enabled=False, plc_type='CLX'):
        """Initialize the channel.

        :param ip: IP address of the PLC.
        :param plc_tag: name of the PLC tag to read.
        :param plc_type: "CLX" or "Micro800".
        Remaining parameters are documented on Channel.__init__.
        """
        super(PLCChannel, self).__init__(mesh_name, data_type, chg_threshold, guarantee_sec, map_, write_enabled)
        # Channel.__init__ already stores the common fields (the original
        # redundantly re-assigned all of them here); only the
        # PLC-specific attributes are set below.
        self.plc_ip = ip
        self.plc_tag = plc_tag
        self.plc_type = plc_type

    def read(self):
        """Read the tag from the PLC.

        :return: the tag's value, or None when the tag/IP is unset or the
            read fails (read_tag returns False on failure).
        """
        plc_value = None
        if self.plc_tag and self.plc_ip:
            read_value = read_tag(self.plc_ip, self.plc_tag, plc_type=self.plc_type)
            if read_value:
                plc_value = read_value[0]
        return plc_value
class BoolArrayChannels(Channel):
    """Hold the configuration for a set of boolean array channels.

    Reads a single integer PLC tag, splits it into bits with binarray(),
    and publishes the bits selected by map_ (bit index -> channel name)
    as a dict value.
    """

    def __init__(self, ip, mesh_name, plc_tag, data_type, chg_threshold, guarantee_sec, map_=False, write_enabled=False):
        """Initialize the channel.

        :param ip: IP address of the PLC.
        :param plc_tag: integer tag whose bits carry the boolean values.
        :param map_: dict of bit index -> published channel name.
        Remaining parameters are documented on Channel.__init__.
        """
        super(BoolArrayChannels, self).__init__(mesh_name, data_type, chg_threshold, guarantee_sec, map_, write_enabled)
        self.plc_ip = ip
        self.mesh_name = mesh_name
        self.plc_tag = plc_tag
        self.data_type = data_type
        self.last_value = None
        self.value = None
        self.last_send_time = 0
        self.chg_threshold = chg_threshold
        self.guarantee_sec = guarantee_sec
        self.map_ = map_
        self.write_enabled = write_enabled

    def compare_values(self, new_val_dict):
        """Compare new values to old values to see if the values need storing.

        :param new_val_dict: freshly-read dict of channel name -> bit value.
        :return: True when any entry differs from the previous snapshot.
        """
        send = False
        for idx in new_val_dict:
            try:
                if new_val_dict[idx] != self.last_value[idx]:
                    send = True
            except KeyError:
                # A key missing from the previous snapshot counts as a change.
                log.error("Key Error in self.compare_values for index {}".format(idx))
                send = True
        return send

    def read(self, force_send=False):
        """Read the value and check to see if needs to be stored.

        :param force_send: force a send even without a change.
        :return: True when the new bit values should be sent upstream.
        """
        send_needed = False
        send_reason = ""
        if self.plc_tag:
            val = read_tag(self.plc_ip, self.plc_tag)
            if val:
                # Decompose the integer into bits (LSB first) and pick out
                # the bits named in map_.
                bool_arr = binarray(val[0])
                new_val = {}
                for idx in self.map_:
                    try:
                        new_val[self.map_[idx]] = bool_arr[idx]
                    except KeyError:
                        log.error("Not able to get value for index {}".format(idx))
                if self.last_send_time == 0:
                    send_needed = True
                    send_reason = "no send time"
                elif self.value is None:
                    send_needed = True
                    send_reason = "no value"
                elif self.compare_values(new_val):
                    send_needed = True
                    send_reason = "value change"
                elif (time.time() - self.last_send_time) > self.guarantee_sec:
                    send_needed = True
                    send_reason = "guarantee sec"
                elif force_send:
                    send_needed = True
                    send_reason = "forced"
        if send_needed:
            self.value = new_val
            # NOTE(review): last_value is assigned the SAME dict object as
            # value (not the previous snapshot) -- compare_values still works
            # because each read builds a fresh dict, but confirm intended.
            self.last_value = self.value
            self.last_send_time = time.time()
            log.info("Sending {} for {} - {}".format(self.value, self.mesh_name, send_reason))
        return send_needed

View File

@@ -0,0 +1,8 @@
from Channel import PLCChannel
from flowmeterskid import PLC_IP_ADDRESS

# Channels polled by the flowmeterskid driver.
# PLCChannel(ip, mesh_name, plc_tag, data_type, chg_threshold, guarantee_sec)
tags = [
    # Scaled flow rate (tag name suggests barrels per day -- confirm).
    PLCChannel(PLC_IP_ADDRESS, "flowrate","Scaled_BPD_FlowRate","REAL", 100, 3600, plc_type="Micro800"),
    # Lifetime flow totalizer (tag name suggests barrels -- confirm).
    PLCChannel(PLC_IP_ADDRESS, "totalizer_1","Lifetime_Flow_Meter_BBLS","REAL", 1000, 3600, plc_type="Micro800"),
    # Unit string reported by the flow meter.
    PLCChannel(PLC_IP_ADDRESS, "volume_flow_units","SPT_Flow_Meter_Unit","STRING", 1000, 3600, plc_type="Micro800")
]

View File

@@ -0,0 +1,14 @@
{
"files": {
"file6": "persistence.py",
"file5": "file_logger.py",
"file4": "Channel.py",
"file3": "modbusMap.p",
"file2": "utilities.py",
"file1": "flowmeterskid.py"
},
"deviceName": "flowmeterskid",
"driverId": "0190",
"releaseVersion": "1",
"driverFileName": "flowmeterskid.py"
}

View File

@@ -0,0 +1,18 @@
"""Logging setup for PiFlow"""
import logging
from logging.handlers import RotatingFileHandler
import sys

# One shared formatter for both the file and console handlers.
log_formatter = logging.Formatter('%(asctime)s %(levelname)s %(funcName)s(%(lineno)d) %(message)s')
log_file = './PiFlow.log'
# Rotate the log at 500 KiB, keeping 2 old files.
my_handler = RotatingFileHandler(log_file, mode='a', maxBytes=500*1024,
                                 backupCount=2, encoding=None, delay=0)
my_handler.setFormatter(log_formatter)
my_handler.setLevel(logging.INFO)
# Shared logger imported by the other modules as `filelogger`.
filelogger = logging.getLogger('PiFlow')
filelogger.setLevel(logging.INFO)
filelogger.addHandler(my_handler)
# Mirror all log output to stdout as well.
console_out = logging.StreamHandler(sys.stdout)
console_out.setFormatter(log_formatter)
filelogger.addHandler(console_out)

View File

@@ -0,0 +1,209 @@
"""Driver for flowmeterskid."""
import threading
import json
import time
from random import randint
import os
from device_base import deviceBase
from Channel import PLCChannel, ModbusChannel,read_tag, write_tag, TAG_DATAERROR_SLEEPTIME
import persistence
from utilities import get_public_ip_address, get_private_ip_address
from file_logger import filelogger as logger
# IP address of the Micro800 PLC the channels read from.
PLC_IP_ADDRESS = "192.168.1.12"
# NOTE: Tags imports PLC_IP_ADDRESS back from this module, so this import
# must stay AFTER the constant above to avoid a circular-import failure.
from Tags import tags
from datetime import datetime as dt
logger.info("flowmeterskid startup")
# GLOBAL VARIABLES
WAIT_FOR_CONNECTION_SECONDS = 20  # boot delay before polling starts
IP_CHECK_PERIOD = 60  # seconds between IP address re-checks
_ = None
CHANNELS = tags  # channel objects polled by the driver main loop
class start(threading.Thread, deviceBase):
    """Meshify driver for the flowmeterskid device.

    Waits for the network to come up, then polls every configured PLC
    channel on each 10-minute wall-clock boundary (or when a sync is
    forced) and publishes the readings -- plus day/week/month/year
    volume totalizers derived from the lifetime totalizer -- to
    ThingsBoard.
    """

    # Where the totalizer baselines are persisted across restarts.
    TOTALIZER_FILE = "/root/python_firmware/totalizers.json"
    # Initial totalizer state: period markers plus lifetime baselines.
    # (The original duplicated this dict literal three times.)
    EMPTY_TOTALIZERS = {
        "day": 0,
        "week": 0,
        "month": 0,
        "year": 0,
        "lifetime": 0,
        "dayHolding": 0,
        "weekHolding": 0,
        "monthHolding": 0,
        "yearHolding": 0
    }

    def __init__(self, name=None, number=None, mac=None, Q=None, mcu=None, companyId=None, offset=None, mqtt=None, Nodes=None):
        """Initialize the driver and start its polling thread."""
        threading.Thread.__init__(self)
        deviceBase.__init__(self, name=name, number=number, mac=mac, Q=Q, mcu=mcu, companyId=companyId, offset=offset, mqtt=mqtt, Nodes=Nodes)
        self.daemon = True
        self.version = "1"
        self.finished = threading.Event()
        # Set by flowmeterskid_sync() to trigger an immediate send.
        self.force_send = False
        self.public_ip_address = ""
        self.public_ip_address_last_checked = 0
        self.private_ip_address = ""
        self.ping_counter = 0
        threading.Thread.start(self)

    # this is a required function for all drivers, its goal is to upload some piece of data
    # about your device so it can be seen on the web
    def register(self):
        """Register the driver (currently a no-op)."""
        # self.sendtodb("log", "BOOM! Booted.", 0)
        pass

    def run(self):
        """Main polling loop: report on every 10-minute boundary."""
        for i in range(0, WAIT_FOR_CONNECTION_SECONDS):
            print("flowmeterskid driver will start in {} seconds".format(WAIT_FOR_CONNECTION_SECONDS - i))
            time.sleep(1)
        logger.info("BOOM! Starting flowmeterskid driver...")
        # NOTE(review): _check_ip_address is not defined in this class;
        # assumed to be provided by deviceBase -- confirm.
        self._check_ip_address()
        self.nodes["flowmeterskid_0199"] = self
        last_sent_second = None
        while True:
            if self.force_send:
                logger.warning("FORCE SEND: TRUE")
            now = int(time.time())
            # Fire once per 10-minute boundary. Remembering the second we
            # last fired prevents duplicate sends within the boundary second
            # (the original busy-loop re-entered the branch many times).
            if (now % 600 == 0 and now != last_sent_second) or self.force_send:
                last_sent_second = now
                self._poll_and_send()
                self.force_send = False
            # BUG FIX: the original looped with no sleep, pinning a CPU
            # core. A short sleep keeps usage down without skipping a
            # boundary second.
            time.sleep(0.25)

    def _poll_and_send(self):
        """Read all channels, build the payload(s) and send them."""
        # Timestamp (ms) rounded to the nearest 10-minute boundary.
        payload = {"ts": (round(dt.timestamp(dt.now()) / 600) * 600) * 1000, "values": {}}
        resetPayload = {"ts": "", "values": {}}
        dayReset, weekReset, monthReset, yearReset = False, False, False, False
        for chan in CHANNELS:
            val = chan.read()
            try:
                # BUG FIX: the original tested `chan in ["totalizer_1"]`,
                # comparing the channel OBJECT to a string -- never true,
                # so the period totalizers were never computed.
                if chan.mesh_name == "totalizer_1":
                    payload["values"]["day_volume"], dayReset = self.totalizeDay(val)
                    payload["values"]["week_volume"], weekReset = self.totalizeWeek(val)
                    payload["values"]["month_volume"], monthReset = self.totalizeMonth(val)
                    payload["values"]["year_volume"], yearReset = self.totalizeYear(val)
                payload["values"][chan.mesh_name] = val
            except Exception as e:
                logger.error(e)
        self.sendToTB(json.dumps(payload))
        # When a period rolled over, publish the finished period's volume
        # under its "last_*" key and zero the running value.
        if dayReset:
            resetPayload["values"]["yesterday_volume"] = payload["values"]["day_volume"]
            resetPayload["values"]["day_volume"] = 0
        if weekReset:
            resetPayload["values"]["last_week_volume"] = payload["values"]["week_volume"]
            resetPayload["values"]["week_volume"] = 0
        if monthReset:
            resetPayload["values"]["last_month_volume"] = payload["values"]["month_volume"]
            resetPayload["values"]["month_volume"] = 0
        if yearReset:
            resetPayload["values"]["last_year_volume"] = payload["values"]["year_volume"]
            resetPayload["values"]["year_volume"] = 0
        if resetPayload["values"]:
            # 1 ms after the boundary so it sorts after the main payload.
            resetPayload["ts"] = 1 + (round(dt.timestamp(dt.now()) / 600) * 600) * 1000
            self.sendToTB(json.dumps(resetPayload))

    def flowmeterskid_sync(self, name, value):
        """Sync all data from the driver on the next loop iteration."""
        self.force_send = True
        self.sendtodb("log", "synced", 0)
        return True

    def saveTotalizers(self, totalizers):
        """Persist the totalizer state; log (never raise) on failure."""
        try:
            with open(self.TOTALIZER_FILE, "w") as t:
                json.dump(totalizers, t)
        except Exception as e:
            logger.error(e)

    def get_totalizers(self):
        """Load the persisted totalizer state, or a fresh empty state."""
        # Missing file: first run, start from the empty state.
        if not os.path.exists(self.TOTALIZER_FILE):
            return dict(self.EMPTY_TOTALIZERS)
        try:
            with open(self.TOTALIZER_FILE, "r") as t:
                totalizers = json.load(t)
            if not totalizers:
                logger.info("-----INITIALIZING TOTALIZERS-----")
                totalizers = dict(self.EMPTY_TOTALIZERS)
        except Exception as e:
            # Corrupt/unreadable file: log and start over (the original
            # swallowed the error with a bare except).
            logger.error(e)
            totalizers = dict(self.EMPTY_TOTALIZERS)
        return totalizers

    @staticmethod
    def _period_now():
        """Current datetime rounded to the nearest 10-minute boundary."""
        return dt.fromtimestamp(round(dt.timestamp(dt.now()) / 600) * 600)

    def totalizeDay(self, lifetime):
        """Return (volume since start of day, day-rollover flag)."""
        totalizers = self.get_totalizers()
        now = self._period_now()
        reset = False
        value = lifetime - totalizers["dayHolding"]
        if int(now.strftime("%d")) != int(totalizers["day"]):
            # New day: snapshot the lifetime total as the new baseline.
            totalizers["dayHolding"] = lifetime
            totalizers["day"] = int(now.strftime("%d"))
            self.saveTotalizers(totalizers)
            reset = True
        return (value, reset)

    def totalizeWeek(self, lifetime):
        """Return (volume since start of week, week-rollover flag)."""
        totalizers = self.get_totalizers()
        now = self._period_now()
        reset = False
        value = lifetime - totalizers["weekHolding"]
        # Weeks roll over on Sunday; a never-initialized week (0) also resets.
        if (now.strftime("%U") != totalizers["week"] and now.strftime("%a") == "Sun") or totalizers["week"] == 0:
            totalizers["weekHolding"] = lifetime
            totalizers["week"] = now.strftime("%U")
            self.saveTotalizers(totalizers)
            reset = True
        return (value, reset)

    def totalizeMonth(self, lifetime):
        """Return (volume since start of month, month-rollover flag)."""
        totalizers = self.get_totalizers()
        now = self._period_now()
        reset = False
        value = lifetime - totalizers["monthHolding"]
        if int(now.strftime("%m")) != int(totalizers["month"]):
            totalizers["monthHolding"] = lifetime
            totalizers["month"] = now.strftime("%m")
            self.saveTotalizers(totalizers)
            reset = True
        return (value, reset)

    def totalizeYear(self, lifetime):
        """Return (volume since start of year, year-rollover flag)."""
        totalizers = self.get_totalizers()
        now = self._period_now()
        reset = False
        value = lifetime - totalizers["yearHolding"]
        if int(now.strftime("%Y")) != int(totalizers["year"]):
            totalizers["yearHolding"] = lifetime
            totalizers["year"] = now.strftime("%Y")
            self.saveTotalizers(totalizers)
            reset = True
        return (value, reset)

View File

@@ -0,0 +1,21 @@
"""Data persistance functions."""
# if more advanced persistence is needed, use a sqlite database
import json
def load(filename="persist.json"):
    """Load persisted settings from *filename*.

    :return: the decoded JSON object, or False on any failure
        (missing file, bad JSON, permission error, ...).
    """
    try:
        with open(filename) as fp:
            return json.load(fp)
    except Exception:
        # Best-effort loader: callers treat False as "nothing persisted".
        return False
def store(persist_obj, filename="persist.json"):
    """Store the persisting settings into the specified file.

    :return: None on success (json.dump's return value), False on failure.
    """
    try:
        with open(filename, 'w') as fp:
            return json.dump(persist_obj, fp, indent=4)
    except Exception:
        # Best-effort writer: callers treat False as "could not persist".
        return False

View File

@@ -0,0 +1,62 @@
"""Utility functions for the driver."""
import socket
import struct
import urllib
import contextlib
def get_private_ip_address():
    """Find the private IP Address of the host device.

    Opens a UDP socket "towards" a public address (no packet is actually
    sent for a UDP connect) and reads back the local address the OS
    chose for the route.

    :return: the IP address string, or the caught exception object on
        failure (pre-existing contract preserved -- callers beware).
    """
    sock = None
    try:
        sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        sock.connect(("8.8.8.8", 80))
        ip_address = sock.getsockname()[0]
    except Exception as e:
        return e
    finally:
        # BUG FIX: the original leaked the socket when connect() raised.
        if sock is not None:
            sock.close()
    return ip_address
def get_public_ip_address():
    """Return the public IP address as reported by checkip.amazonaws.com.

    NOTE(review): urllib.urlopen is the Python 2 API; under Python 3 this
    would need urllib.request.urlopen -- confirm the target runtime.

    :return: the address with its trailing newline stripped, or "0.0.0.0"
        when the lookup fails.
    """
    ip_address = "0.0.0.0"
    try:
        with contextlib.closing(urllib.urlopen("http://checkip.amazonaws.com")) as url:
            ip_address = url.read()
    except Exception as e:
        print("Could not resolve address: {}".format(e))
        return ip_address
    # Strip the trailing newline the service appends.
    return ip_address[:-1]
def int_to_float16(int_to_convert):
    """Decode a 16-bit integer as an IEEE-754 half-precision float.

    Layout: 1 sign bit, 5 exponent bits (bias 15), 10 mantissa bits.

    :param int_to_convert: raw 16-bit value (only the low 16 bits are used).
    :return: the decoded float (inf/NaN for exponent 0b11111).
    """
    bits = int_to_convert & 0xFFFF
    sign = -1.0 if (bits >> 15) & 0x1 else 1.0
    exponent = (bits >> 10) & 0x1F
    fraction = float(bits & 0x3FF)
    if exponent == 0:
        # Subnormal numbers (and signed zero): no implicit leading 1.
        return sign * 2 ** -14 * fraction / 1024.0
    if exponent == 0x1F:
        # All-ones exponent: infinity when the mantissa is zero, else NaN.
        if fraction == 0:
            return sign * float("inf")
        return float("NaN")
    # Normal numbers: implicit leading 1 on the mantissa.
    return sign * (2 ** (exponent - 15)) * (1.0 + fraction / 1024.0)
def ints_to_float(int1, int2):
    """Convert 2 registers into a floating point number.

    The registers are treated big-endian with *int1* as the high word,
    forming one IEEE-754 single-precision value.

    :param int1: high 16-bit register value.
    :param int2: low 16-bit register value.
    :return: the decoded float.
    """
    packed = struct.pack('>HH', int1, int2)
    (value,) = struct.unpack('>f', packed)
    print("[{}, {}] >> {}".format(int1, int2, value))
    return value
def degf_to_degc(temp_f):
    """Convert a temperature in degrees Fahrenheit to degrees Celsius."""
    ratio = 5.0 / 9.0
    return (temp_f - 32.0) * ratio
def degc_to_degf(temp_c):
    """Convert a temperature in degrees Celsius to degrees Fahrenheit."""
    scaled = temp_c * 1.8
    return scaled + 32.0

23
tb_report/.gitignore vendored Normal file
View File

@@ -0,0 +1,23 @@
# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
# dependencies
/node_modules
/.pnp
.pnp.js
# testing
/coverage
# production
/build
# misc
.DS_Store
.env.local
.env.development.local
.env.test.local
.env.production.local
npm-debug.log*
yarn-debug.log*
yarn-error.log*

View File

@@ -0,0 +1,302 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [],
"source": [
"import logging, json, xlsxwriter\n",
"import pandas as pd\n",
"from datetime import datetime as dt\n",
"from datetime import timedelta as td\n",
"from tb_rest_client.rest_client_ce import *\n",
"from tb_rest_client.rest import ApiException"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [],
"source": [
"logging.basicConfig(level=logging.DEBUG,\n",
" format='%(asctime)s - %(levelname)s - %(module)s - %(lineno)d - %(message)s',\n",
" datefmt='%Y-%m-%d %H:%M:%S')"
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {},
"outputs": [],
"source": [
"# ThingsBoard REST API URL\n",
"url = \"https://hp.henrypump.cloud\"\n",
"# Default Tenant Administrator credentials\n",
"username = \"henry.pump.automation@gmail.com\"\n",
"password = \"Henry Pump @ 2022\""
]
},
{
"cell_type": "code",
"execution_count": 5,
"metadata": {},
"outputs": [],
"source": [
"def getDevices(rest_client, customers,target_customer, page=0, pageSize=500):\n",
" for c in customers.data:\n",
" if c.name == target_customer:\n",
" cid = c.id.id\n",
" devices = rest_client.get_customer_devices(customer_id=cid, page_size=pageSize, page=page)\n",
" return devices.to_dict()"
]
},
{
"cell_type": "code",
"execution_count": 6,
"metadata": {},
"outputs": [],
"source": [
"def getDeviceKeys(rest_client, devices,target_device):\n",
" try:\n",
" for d in devices['data']:\n",
" if d[\"name\"] == target_device:\n",
" did = d['id']['id']\n",
" eType = d['id']['entity_type']\n",
" keys = rest_client.get_timeseries_keys_v1(entity_type=eType, entity_id=did)\n",
" return eType, did, keys, None\n",
" return None,None,None,\"Device Not Found\"\n",
" except Exception as e:\n",
" print(e)\n",
" return (None, None, None, e)"
]
},
{
"cell_type": "code",
"execution_count": 7,
"metadata": {},
"outputs": [],
"source": [
"def getTelemetry(rest_client, devices, keys, start_ts, end_ts, entity_type, entity_id, limit):\n",
" return rest_client.get_timeseries(entity_type=entity_type, entity_id=entity_id, keys=keys, start_ts=start_ts, end_ts=end_ts, limit=limit)\n",
" "
]
},
{
"cell_type": "code",
"execution_count": 8,
"metadata": {},
"outputs": [],
"source": [
"def getTime(timeRequest):\n",
" start_ts, end_ts = 0,0\n",
" if timeRequest[\"type\"] == \"last\":\n",
" now = dt.now()\n",
" delta = td(days=timeRequest[\"days\"], seconds=timeRequest[\"seconds\"], microseconds=timeRequest[\"microseconds\"], milliseconds=timeRequest[\"milliseconds\"], minutes=timeRequest[\"minutes\"], hours=timeRequest[\"hours\"], weeks=timeRequest[\"weeks\"])\n",
" start_ts = str(int(dt.timestamp(now - delta) * 1000))\n",
" end_ts = str(int(dt.timestamp(now) * 1000))\n",
" \n",
" return (start_ts, end_ts)"
]
},
{
"cell_type": "code",
"execution_count": 9,
"metadata": {},
"outputs": [],
"source": [
"def getThingsBoardData(url, username, password, targetCustomer, timeRequest):\n",
" # Creating the REST client object with context manager to get auto token refresh\n",
" with RestClientCE(base_url=url) as rest_client:\n",
" try:\n",
" # Auth with credentials\n",
" rest_client.login(username=username, password=password)\n",
" # Get customers > get devices under a target customer > get keys for devices > get data for devices\n",
" customers = rest_client.get_customers(page_size=\"100\", page=\"0\")\n",
" devices = getDevices(rest_client=rest_client, customers=customers, target_customer=targetCustomer)\n",
" telemetry = {}\n",
" for d in devices['data']:\n",
" entity_type, entity_id, keys, err = getDeviceKeys(rest_client=rest_client, devices=devices, target_device=d['name'])\n",
" start_ts, end_ts = getTime(timeRequest)\n",
" telemetry[d['name']] = getTelemetry(rest_client=rest_client, devices=devices, keys=','.join(keys), start_ts=start_ts, end_ts=end_ts, entity_id=entity_id, entity_type=entity_type, limit=25000)\n",
" return telemetry\n",
" except ApiException as e:\n",
" logging.error(e)\n",
" return False\n"
]
},
{
"cell_type": "code",
"execution_count": 10,
"metadata": {},
"outputs": [],
"source": [
"def formatColumnName(telemetryName):\n",
" return \" \".join([x.capitalize() for x in telemetryName.split(\"_\")])"
]
},
{
"cell_type": "code",
"execution_count": 11,
"metadata": {},
"outputs": [],
"source": [
"def getDataFrame(telemetry, ignore_keys): \n",
" df = pd.DataFrame()\n",
" #for location in telemetry.keys():\n",
" # Iterate through each datapoint within each location\n",
" for datapoint in telemetry.keys():\n",
" # Convert the datapoint list of dictionaries to a DataFrame\n",
" if datapoint not in ignore_keys:\n",
" temp_df = pd.DataFrame(telemetry[datapoint])\n",
" temp_df['ts'] = pd.to_datetime(temp_df['ts'], unit='ms').dt.tz_localize('UTC').dt.tz_convert('US/Central').dt.tz_localize(None)\n",
" # Set 'ts' as the index\n",
" temp_df.set_index('ts', inplace=True)\n",
" # Rename 'value' column to the name of the datapoint\n",
" temp_df.rename(columns={'value': formatColumnName(datapoint)}, inplace=True)\n",
" \n",
" # Join the temp_df to the main DataFrame\n",
" df = df.join(temp_df, how='outer')\n",
"\n",
" df = df.fillna(method='ffill', limit=2)\n",
" # Rename index to 'Date'\n",
" df.rename_axis('Date', inplace=True)\n",
" return df"
]
},
{
"cell_type": "code",
"execution_count": 16,
"metadata": {},
"outputs": [],
"source": [
"time = {\n",
" \"type\": \"last\",\n",
" \"days\":3,\n",
" \"seconds\":0,\n",
" \"microseconds\":0,\n",
" \"milliseconds\":0,\n",
" \"minutes\":0,\n",
" \"hours\":0,\n",
" \"weeks\":0\n",
" }\n",
"telemetry = getThingsBoardData(url, username, password, \"Faskens\", time)"
]
},
{
"cell_type": "code",
"execution_count": 17,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"/Users/nico/miniforge3/envs/thingsboard/lib/python3.10/site-packages/xlsxwriter/worksheet.py:3261: UserWarning: Must have at least one data row in in add_table()\n",
" warn(\"Must have at least one data row in in add_table()\")\n",
"/Users/nico/miniforge3/envs/thingsboard/lib/python3.10/site-packages/xlsxwriter/worksheet.py:3261: UserWarning: Must have at least one data row in in add_table()\n",
" warn(\"Must have at least one data row in in add_table()\")\n",
"/Users/nico/miniforge3/envs/thingsboard/lib/python3.10/site-packages/xlsxwriter/worksheet.py:3261: UserWarning: Must have at least one data row in in add_table()\n",
" warn(\"Must have at least one data row in in add_table()\")\n",
"/Users/nico/miniforge3/envs/thingsboard/lib/python3.10/site-packages/xlsxwriter/worksheet.py:3261: UserWarning: Must have at least one data row in in add_table()\n",
" warn(\"Must have at least one data row in in add_table()\")\n",
"/Users/nico/miniforge3/envs/thingsboard/lib/python3.10/site-packages/xlsxwriter/worksheet.py:3261: UserWarning: Must have at least one data row in in add_table()\n",
" warn(\"Must have at least one data row in in add_table()\")\n"
]
}
],
"source": [
"\n",
"# Create a Pandas Excel writer using XlsxWriter as the engine.\n",
"writer = pd.ExcelWriter(\"/Users/nico/Documents/test/pandas_table.xlsx\", engine=\"xlsxwriter\",\n",
" datetime_format=\"yyyy-mm-dd hh:mm:ss\",\n",
" date_format=\"yyyy-mm-dd\",engine_kwargs={'options': {'strings_to_numbers': True}})\n",
"chartsheet = writer.book.add_worksheet(\"Charts\")\n",
"\n",
"for device in telemetry.keys():\n",
" df = getDataFrame(telemetry[device], [\"yesterday_volume\"])\n",
" # Write the dataframe data to XlsxWriter. Turn off the default header and\n",
" # index and skip one row to allow us to insert a user defined header.\n",
" df.to_excel(writer, sheet_name=device, startrow=1, header=False, index=True)\n",
"\n",
" # Get the xlsxwriter workbook and worksheet objects.\n",
" workbook = writer.book\n",
" worksheet = writer.sheets[device]\n",
"\n",
" # Get the dimensions of the dataframe.\n",
" (max_row, max_col) = df.shape\n",
" \n",
" # Create a list of column headers, to use in add_table().\n",
" column_settings = [{\"header\": column} for column in df.columns]\n",
" # Add the Excel table structure. Pandas will add the data.\n",
" worksheet.add_table(0, 0, max_row, max_col, {\"columns\": [{'header': 'Date'}] + column_settings})\n",
"\n",
" # Make the columns wider for clarity.\n",
" worksheet.set_column(0, max_col , 18)\n",
"charts = []#[{\"chartType\": \"line\",\"columnName\": \"temperature\"}, {\"chartType\": \"line\", \"columnName\": \"volume_flow\"},{\"chartType\": \"bar\", \"columnName\": \"volume_flow\"},{\"chartType\": \"pie\", \"columnName\": \"today_volume\"},{\"chartType\": \"pie\", \"columnName\": \"month_volume\"}]\n",
"position = 1\n",
"pie_chart_count = 1\n",
"for c in charts:\n",
" # Configure the chart. In simplest case we add one or more data series.\n",
" chart = writer.book.add_chart({'type': c[\"chartType\"]})\n",
" if c[\"chartType\"] == \"pie\":\n",
" pieWorksheet = writer.book.add_worksheet(\"Pie Chart \" + str(pie_chart_count))\n",
" pieData = []\n",
" \n",
" for device in telemetry.keys():\n",
" #print(device)\n",
" dataColumn = df.columns.get_loc(formatColumnName(c[\"columnName\"])) + 1\n",
" if c[\"chartType\"] in [ \"line\", \"bar\"]:\n",
" chart.add_series({\n",
" 'values': [device, 1, dataColumn ,max_row,dataColumn],\n",
" 'categories': [device, 1,0,max_row,0],\n",
" 'name': device,\n",
"\n",
" })\n",
" chart.set_x_axis({'text_axis': True})\n",
" elif c[\"chartType\"] == \"pie\":\n",
" #pie charts don't allow data from multiple worksheets need to make a new worksheet with all the desired data so add the data to the worksheet here\n",
" pieData.append([device, \"='\" + device + \"'!$\" + chr(65+dataColumn) + \"$\" + str(max_row)])\n",
" if c[\"chartType\"] == \"pie\":\n",
" for row in range(len(pieData)):\n",
" pieWorksheet.write_row(row, 0, pieData[row])\n",
" chart.add_series({\n",
" 'values': [\"Pie Chart \" + str(pie_chart_count), 0, 1 ,len(pieData)-1,1],\n",
" 'categories': [\"Pie Chart \" + str(pie_chart_count), 0, 0 ,len(pieData)-1,0],\n",
" 'name': device,\n",
" 'data_labels': {'value': True, 'category': True, 'position': 'best_fit'}\n",
" })\n",
" pie_chart_count += 1\n",
" chart.set_title({\"name\": formatColumnName(c[\"columnName\"])})\n",
" \n",
" chartsheet.insert_chart('A' + str(position), chart, {'x_scale': 3, 'y_scale': 2})\n",
" position += 30\n",
"# Close the Pandas Excel writer and output the Excel file.\n",
"writer.close()"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "thingsboard",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.10.5"
},
"orig_nbformat": 4
},
"nbformat": 4,
"nbformat_minor": 2
}

View File

@@ -0,0 +1,70 @@
# Getting Started with Create React App
This project was bootstrapped with [Create React App](https://github.com/facebook/create-react-app).
## Available Scripts
In the project directory, you can run:
### `npm start`
Runs the app in the development mode.\
Open [http://localhost:3000](http://localhost:3000) to view it in your browser.
The page will reload when you make changes.\
You may also see any lint errors in the console.
### `npm test`
Launches the test runner in the interactive watch mode.\
See the section about [running tests](https://facebook.github.io/create-react-app/docs/running-tests) for more information.
### `npm run build`
Builds the app for production to the `build` folder.\
It correctly bundles React in production mode and optimizes the build for the best performance.
The build is minified and the filenames include the hashes.\
Your app is ready to be deployed!
See the section about [deployment](https://facebook.github.io/create-react-app/docs/deployment) for more information.
### `npm run eject`
**Note: this is a one-way operation. Once you `eject`, you can't go back!**
If you aren't satisfied with the build tool and configuration choices, you can `eject` at any time. This command will remove the single build dependency from your project.
Instead, it will copy all the configuration files and the transitive dependencies (webpack, Babel, ESLint, etc) right into your project so you have full control over them. All of the commands except `eject` will still work, but they will point to the copied scripts so you can tweak them. At this point you're on your own.
You don't have to ever use `eject`. The curated feature set is suitable for small and middle deployments, and you shouldn't feel obligated to use this feature. However we understand that this tool wouldn't be useful if you couldn't customize it when you are ready for it.
## Learn More
You can learn more in the [Create React App documentation](https://facebook.github.io/create-react-app/docs/getting-started).
To learn React, check out the [React documentation](https://reactjs.org/).
### Code Splitting
This section has moved here: [https://facebook.github.io/create-react-app/docs/code-splitting](https://facebook.github.io/create-react-app/docs/code-splitting)
### Analyzing the Bundle Size
This section has moved here: [https://facebook.github.io/create-react-app/docs/analyzing-the-bundle-size](https://facebook.github.io/create-react-app/docs/analyzing-the-bundle-size)
### Making a Progressive Web App
This section has moved here: [https://facebook.github.io/create-react-app/docs/making-a-progressive-web-app](https://facebook.github.io/create-react-app/docs/making-a-progressive-web-app)
### Advanced Configuration
This section has moved here: [https://facebook.github.io/create-react-app/docs/advanced-configuration](https://facebook.github.io/create-react-app/docs/advanced-configuration)
### Deployment
This section has moved here: [https://facebook.github.io/create-react-app/docs/deployment](https://facebook.github.io/create-react-app/docs/deployment)
### `npm run build` fails to minify
This section has moved here: [https://facebook.github.io/create-react-app/docs/troubleshooting#npm-run-build-fails-to-minify](https://facebook.github.io/create-react-app/docs/troubleshooting#npm-run-build-fails-to-minify)

18367
tb_report/frontend/package-lock.json generated Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,46 @@
{
"name": "tb_report",
"version": "0.1.0",
"private": true,
"dependencies": {
"@emotion/react": "^11.11.1",
"@emotion/styled": "^11.11.0",
"@mui/material": "^5.13.5",
"@mui/styled-engine-sc": "^5.12.0",
"@mui/x-date-pickers": "^6.7.0",
"@testing-library/jest-dom": "^5.16.5",
"@testing-library/react": "^13.4.0",
"@testing-library/user-event": "^13.5.0",
"@wojtekmaj/react-datetimerange-picker": "^5.2.0",
"dayjs": "^1.11.8",
"react": "^18.2.0",
"react-dom": "^18.2.0",
"react-scripts": "5.0.1",
"styled-components": "^5.3.11",
"web-vitals": "^2.1.4"
},
"scripts": {
"start": "react-scripts start",
"build": "react-scripts build",
"test": "react-scripts test",
"eject": "react-scripts eject"
},
"eslintConfig": {
"extends": [
"react-app",
"react-app/jest"
]
},
"browserslist": {
"production": [
">0.2%",
"not dead",
"not op_mini all"
],
"development": [
"last 1 chrome version",
"last 1 firefox version",
"last 1 safari version"
]
}
}

Binary file not shown.

After

Width:  |  Height:  |  Size: 3.8 KiB

View File

@@ -0,0 +1,43 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8" />
<link rel="icon" href="%PUBLIC_URL%/favicon.ico" />
<meta name="viewport" content="width=device-width, initial-scale=1" />
<meta name="theme-color" content="#000000" />
<meta
name="description"
content="Web site created using create-react-app"
/>
<link rel="apple-touch-icon" href="%PUBLIC_URL%/logo192.png" />
<!--
manifest.json provides metadata used when your web app is installed on a
user's mobile device or desktop. See https://developers.google.com/web/fundamentals/web-app-manifest/
-->
<link rel="manifest" href="%PUBLIC_URL%/manifest.json" />
<!--
Notice the use of %PUBLIC_URL% in the tags above.
It will be replaced with the URL of the `public` folder during the build.
Only files inside the `public` folder can be referenced from the HTML.
Unlike "/favicon.ico" or "favicon.ico", "%PUBLIC_URL%/favicon.ico" will
work correctly both with client-side routing and a non-root public URL.
Learn how to configure a non-root public URL by running `npm run build`.
-->
<title>React App</title>
</head>
<body>
<noscript>You need to enable JavaScript to run this app.</noscript>
<div id="root"></div>
<!--
This HTML file is a template.
If you open it directly in the browser, you will see an empty page.
You can add webfonts, meta tags, or analytics to this file.
The build step will place the bundled scripts into the <body> tag.
To begin the development, run `npm start` or `yarn start`.
To create a production bundle, use `npm run build` or `yarn build`.
-->
</body>
</html>

Binary file not shown.

After

Width:  |  Height:  |  Size: 5.2 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 9.4 KiB

View File

@@ -0,0 +1,25 @@
{
"short_name": "React App",
"name": "Create React App Sample",
"icons": [
{
"src": "favicon.ico",
"sizes": "64x64 32x32 24x24 16x16",
"type": "image/x-icon"
},
{
"src": "logo192.png",
"type": "image/png",
"sizes": "192x192"
},
{
"src": "logo512.png",
"type": "image/png",
"sizes": "512x512"
}
],
"start_url": ".",
"display": "standalone",
"theme_color": "#000000",
"background_color": "#ffffff"
}

View File

@@ -0,0 +1,3 @@
# https://www.robotstxt.org/robotstxt.html
User-agent: *
Disallow:

View File

@@ -0,0 +1,48 @@
html,
body {
height: 100%;
}
body {
margin: 0;
font-family: Segoe UI, Tahoma, sans-serif;
}
.Sample input,
.Sample button {
font: inherit;
}
.Sample header {
background-color: #323639;
box-shadow: 0 0 8px rgba(0, 0, 0, 0.5);
padding: 20px;
color: white;
}
.Sample header h1 {
font-size: inherit;
margin: 0;
}
.Sample__container {
display: flex;
flex-direction: row;
flex-wrap: wrap;
align-items: flex-start;
margin: 10px 0;
padding: 10px;
}
.Sample__container > * > * {
margin: 10px;
}
.Sample__container__content {
display: flex;
max-width: 100%;
flex-basis: 420px;
flex-direction: column;
flex-grow: 100;
align-items: stretch;
}

View File

@@ -0,0 +1,29 @@
import React, { useState } from 'react';
import DateTimeRangePicker from '@wojtekmaj/react-datetimerange-picker';
import './App.css';
type ValuePiece = Date | null;
type Value = ValuePiece | [ValuePiece, ValuePiece];
const now = new Date();
const yesterdayBegin = new Date(now.getFullYear(), now.getMonth(), now.getDate() - 1);
const todayNoon = new Date(now.getFullYear(), now.getMonth(), now.getDate(), 12);
/**
 * Demo page for @wojtekmaj/react-datetimerange-picker.
 *
 * Keeps a [start, end] Date pair in state — initialised to the start of
 * yesterday through noon today (module-level constants above) — and binds
 * it to a controlled DateTimeRangePicker.
 */
export default function Sample() {
  const [range, setRange] = useState([yesterdayBegin, todayNoon]);

  const picker = <DateTimeRangePicker onChange={setRange} value={range} />;

  return (
    <div className="Sample">
      <header>
        <h1>react-datetimerange-picker sample page</h1>
      </header>
      <div className="Sample__container">
        <main className="Sample__container__content">{picker}</main>
      </div>
    </div>
  );
}

View File

@@ -0,0 +1,8 @@
import { render, screen } from '@testing-library/react';
import App from './App';
// App.js exports the date-range picker Sample page, whose only static text
// is the <h1> heading below. The CRA boilerplate assertion looked for a
// "learn react" link that the Sample component never renders, so this
// test failed unconditionally — query the real heading instead.
test('renders the sample page heading', () => {
  render(<App />);
  const heading = screen.getByText(/react-datetimerange-picker sample page/i);
  expect(heading).toBeInTheDocument();
});

View File

@@ -0,0 +1,13 @@
body {
margin: 0;
font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', 'Roboto', 'Oxygen',
'Ubuntu', 'Cantarell', 'Fira Sans', 'Droid Sans', 'Helvetica Neue',
sans-serif;
-webkit-font-smoothing: antialiased;
-moz-osx-font-smoothing: grayscale;
}
code {
font-family: source-code-pro, Menlo, Monaco, Consolas, 'Courier New',
monospace;
}

View File

@@ -0,0 +1,17 @@
import React from 'react';
import ReactDOM from 'react-dom/client';
import './index.css';
import Sample from './App';
import reportWebVitals from './reportWebVitals';
// Mount the Sample app at the #root element declared in public/index.html.
// StrictMode is development-only: it double-invokes render logic to surface
// unsafe patterns and has no effect on the production build.
const container = document.getElementById('root');
const appRoot = ReactDOM.createRoot(container);

appRoot.render(
  <React.StrictMode>
    <Sample />
  </React.StrictMode>
);

// Start collecting performance metrics by passing a reporter callback
// (e.g. reportWebVitals(console.log)) or an analytics sender.
// Learn more: https://bit.ly/CRA-vitals
reportWebVitals();

View File

@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 841.9 595.3"><g fill="#61DAFB"><path d="M666.3 296.5c0-32.5-40.7-63.3-103.1-82.4 14.4-63.6 8-114.2-20.2-130.4-6.5-3.8-14.1-5.6-22.4-5.6v22.3c4.6 0 8.3.9 11.4 2.6 13.6 7.8 19.5 37.5 14.9 75.7-1.1 9.4-2.9 19.3-5.1 29.4-19.6-4.8-41-8.5-63.5-10.9-13.5-18.5-27.5-35.3-41.6-50 32.6-30.3 63.2-46.9 84-46.9V78c-27.5 0-63.5 19.6-99.9 53.6-36.4-33.8-72.4-53.2-99.9-53.2v22.3c20.7 0 51.4 16.5 84 46.6-14 14.7-28 31.4-41.3 49.9-22.6 2.4-44 6.1-63.6 11-2.3-10-4-19.7-5.2-29-4.7-38.2 1.1-67.9 14.6-75.8 3-1.8 6.9-2.6 11.5-2.6V78.5c-8.4 0-16 1.8-22.6 5.6-28.1 16.2-34.4 66.7-19.9 130.1-62.2 19.2-102.7 49.9-102.7 82.3 0 32.5 40.7 63.3 103.1 82.4-14.4 63.6-8 114.2 20.2 130.4 6.5 3.8 14.1 5.6 22.5 5.6 27.5 0 63.5-19.6 99.9-53.6 36.4 33.8 72.4 53.2 99.9 53.2 8.4 0 16-1.8 22.6-5.6 28.1-16.2 34.4-66.7 19.9-130.1 62-19.1 102.5-49.9 102.5-82.3zm-130.2-66.7c-3.7 12.9-8.3 26.2-13.5 39.5-4.1-8-8.4-16-13.1-24-4.6-8-9.5-15.8-14.4-23.4 14.2 2.1 27.9 4.7 41 7.9zm-45.8 106.5c-7.8 13.5-15.8 26.3-24.1 38.2-14.9 1.3-30 2-45.2 2-15.1 0-30.2-.7-45-1.9-8.3-11.9-16.4-24.6-24.2-38-7.6-13.1-14.5-26.4-20.8-39.8 6.2-13.4 13.2-26.8 20.7-39.9 7.8-13.5 15.8-26.3 24.1-38.2 14.9-1.3 30-2 45.2-2 15.1 0 30.2.7 45 1.9 8.3 11.9 16.4 24.6 24.2 38 7.6 13.1 14.5 26.4 20.8 39.8-6.3 13.4-13.2 26.8-20.7 39.9zm32.3-13c5.4 13.4 10 26.8 13.8 39.8-13.1 3.2-26.9 5.9-41.2 8 4.9-7.7 9.8-15.6 14.4-23.7 4.6-8 8.9-16.1 13-24.1zM421.2 430c-9.3-9.6-18.6-20.3-27.8-32 9 .4 18.2.7 27.5.7 9.4 0 18.7-.2 27.8-.7-9 11.7-18.3 22.4-27.5 32zm-74.4-58.9c-14.2-2.1-27.9-4.7-41-7.9 3.7-12.9 8.3-26.2 13.5-39.5 4.1 8 8.4 16 13.1 24 4.7 8 9.5 15.8 14.4 23.4zM420.7 163c9.3 9.6 18.6 20.3 27.8 32-9-.4-18.2-.7-27.5-.7-9.4 0-18.7.2-27.8.7 9-11.7 18.3-22.4 27.5-32zm-74 58.9c-4.9 7.7-9.8 15.6-14.4 23.7-4.6 8-8.9 16-13 24-5.4-13.4-10-26.8-13.8-39.8 13.1-3.1 26.9-5.8 41.2-7.9zm-90.5 125.2c-35.4-15.1-58.3-34.9-58.3-50.6 0-15.7 22.9-35.6 58.3-50.6 8.6-3.7 18-7 27.7-10.1 5.7 19.6 13.2 40 22.5 60.9-9.2 20.8-16.6 
41.1-22.2 60.6-9.9-3.1-19.3-6.5-28-10.2zM310 490c-13.6-7.8-19.5-37.5-14.9-75.7 1.1-9.4 2.9-19.3 5.1-29.4 19.6 4.8 41 8.5 63.5 10.9 13.5 18.5 27.5 35.3 41.6 50-32.6 30.3-63.2 46.9-84 46.9-4.5-.1-8.3-1-11.3-2.7zm237.2-76.2c4.7 38.2-1.1 67.9-14.6 75.8-3 1.8-6.9 2.6-11.5 2.6-20.7 0-51.4-16.5-84-46.6 14-14.7 28-31.4 41.3-49.9 22.6-2.4 44-6.1 63.6-11 2.3 10.1 4.1 19.8 5.2 29.1zm38.5-66.7c-8.6 3.7-18 7-27.7 10.1-5.7-19.6-13.2-40-22.5-60.9 9.2-20.8 16.6-41.1 22.2-60.6 9.9 3.1 19.3 6.5 28.1 10.2 35.4 15.1 58.3 34.9 58.3 50.6-.1 15.7-23 35.6-58.4 50.6zM320.8 78.4z"/><circle cx="420.9" cy="296.5" r="45.7"/><path d="M520.5 78.1z"/></g></svg>

After

Width:  |  Height:  |  Size: 2.6 KiB

View File

@@ -0,0 +1,13 @@
/**
 * Report each Core Web Vital (CLS, FID, FCP, LCP, TTFB) to the supplied
 * callback, lazily loading the `web-vitals` package on first use so its
 * bundle cost is only paid when a reporter is actually provided.
 *
 * A missing or non-function argument makes this a no-op.
 */
const reportWebVitals = (onPerfEntry) => {
  const isCallable = Boolean(onPerfEntry) && onPerfEntry instanceof Function;
  if (!isCallable) {
    return;
  }
  import('web-vitals').then((vitals) => {
    const { getCLS, getFID, getFCP, getLCP, getTTFB } = vitals;
    getCLS(onPerfEntry);
    getFID(onPerfEntry);
    getFCP(onPerfEntry);
    getLCP(onPerfEntry);
    getTTFB(onPerfEntry);
  });
};

export default reportWebVitals;

View File

@@ -0,0 +1,5 @@
// jest-dom adds custom jest matchers for asserting on DOM nodes.
// allows you to do things like:
// expect(element).toHaveTextContent(/react/i)
// learn more: https://github.com/testing-library/jest-dom
import '@testing-library/jest-dom';

View File

@@ -33,12 +33,17 @@ class ThingsBoardAPI():
return devices.json() return devices.json()
def getDeviceKeys(self, devices,target_device): def getDeviceKeys(self, devices,target_device):
for d in devices['data']: try:
if d["name"] == target_device: for d in devices['data']:
did = d['id']['id'] #print(d["name"])
eType = d['id']['entityType'] if d["name"] == target_device:
keys = requests.get(self.url_base + f"plugins/telemetry/{eType}/{did}/keys/timeseries", headers=self.headers) did = d['id']['id']
return eType, did, keys.json() eType = d['id']['entityType']
keys = requests.get(self.url_base + f"plugins/telemetry/{eType}/{did}/keys/timeseries", headers=self.headers)
return eType, did, keys.json(), None
return None,None,None,"Device Not Found"
except Exception as e:
return (None, None, None, e)
def getTelemetry(self, startTs, endTs, keys, eType, did): def getTelemetry(self, startTs, endTs, keys, eType, did):
telemetry = requests.get(self.url_base + f"plugins/telemetry/{eType}/{did}/values/timeseries?startTs={startTs}&endTs={endTs}&keys={keys}", headers=self.headers) telemetry = requests.get(self.url_base + f"plugins/telemetry/{eType}/{did}/values/timeseries?startTs={startTs}&endTs={endTs}&keys={keys}", headers=self.headers)