Convert everything to a working Lambda function within AWS.
reports_xlsx.py is left in the repo as the legacy implementation.
This commit is contained in:
3
.gitignore
vendored
3
.gitignore
vendored
@@ -106,3 +106,6 @@ files/*
|
||||
|
||||
*_to.json
|
||||
*.bak
|
||||
|
||||
deploy/*
|
||||
lambda.zip
|
||||
|
||||
11
build_lambda.sh
Executable file
11
build_lambda.sh
Executable file
@@ -0,0 +1,11 @@
|
||||
#!/bin/bash
# Build the Lambda deployment artifact: vendored dependencies plus the two
# handler sources, zipped into lambda.zip at the repo root.
#
# Fixes: the shebang was written '!#/bin/bash' (inverted, so the kernel could
# not use it), and site-packages was copied without -r, which skips every
# package *directory* and only copies loose top-level files.

set -e  # abort on the first failed step so a partial lambda.zip is never shipped

rm -f lambda.zip
mkdir -p deploy

# -r: site-packages is mostly directories (one per package); a plain cp
# would error on each of them and leave the zip missing its dependencies.
cp -r env/lib/python3.6/site-packages/* deploy
cp meshify.py deploy
cp reports_s3_xlsx.py deploy

cd deploy
zip -r ../lambda.zip ./*
cd ..
|
||||
32
clearData.py
32
clearData.py
@@ -1,32 +0,0 @@
|
||||
import pymssql
|
||||
from os import getenv
|
||||
|
||||
# Connection settings come from the environment so credentials never live in
# the repository.
SQL_SERVER = getenv("HP_SQL_SERVER")
SQL_USER = getenv("HP_SQL_USER")
SQL_PASSWORD = getenv("HP_SQL_PASSWORD")

SQL_DB = "POCCLoud"
SQL_TABLE = "Production"


def main(test_mode=False):
    """Delete every row from the Production table and print whatever remains.

    :param bool test_mode: when True, skip all database access (dry run).
    """
    if (not SQL_SERVER or not SQL_USER or not SQL_PASSWORD) and not test_mode:
        print("Be sure to set the SQL Server, username, and password as environment variables HP_SQL_SERVER, HP_SQL_USER, and HP_SQL_PASSWORD")
        exit()

    # Previously test_mode still opened a connection, which defeated its
    # purpose (and crashed without a reachable server). All DB work is now
    # guarded, matching the companion update script's `if not test_mode` style.
    if test_mode:
        print("Test mode: skipping DELETE on {}.{}".format(SQL_DB, SQL_TABLE))
        return

    conn = pymssql.connect(SQL_SERVER, SQL_USER, SQL_PASSWORD, SQL_DB)
    try:
        cursor = conn.cursor()
        cursor.execute("DELETE FROM Production WHERE id > 0")
        conn.commit()

        # Echo anything left behind so an incomplete delete is visible.
        cursor.execute("SELECT * FROM Production")
        row = cursor.fetchone()
        while row:
            print(row)
            row = cursor.fetchone()
    finally:
        # Release the connection even if a statement fails mid-way.
        conn.close()


if __name__ == '__main__':
    main()
|
||||
@@ -1,19 +0,0 @@
|
||||
"""Get all gateways from the Meshify API."""
|
||||
import meshify
|
||||
import json
|
||||
|
||||
|
||||
def main():
|
||||
"""Run the main function."""
|
||||
gateways = meshify.query_meshify_api("gateways")
|
||||
with open("gateways.json", 'wb') as jsonfile:
|
||||
json.dump(gateways, jsonfile, indent=4, sort_keys=True)
|
||||
csv_string = "Gateway,ipaddress,url\n"
|
||||
for g in gateways:
|
||||
csv_string += "{},,\n".format(g['name'])
|
||||
|
||||
with open("gateways.csv", 'wb') as csvfile:
|
||||
csvfile.write(csv_string)
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
@@ -1,77 +0,0 @@
|
||||
import requests
|
||||
import json
|
||||
from os import getenv
|
||||
from sys import exit, argv
|
||||
from datetime import datetime
|
||||
import meshify
|
||||
|
||||
# Meshify and SQL Server credentials come from the environment.
# MESHIFY_USERNAME / MESHIFY_PASSWORD were referenced in main() but never
# defined anywhere in this file, so the script raised NameError before the
# friendly "set your credentials" message could print.
MESHIFY_USERNAME = getenv("MESHIFY_USERNAME")
MESHIFY_PASSWORD = getenv("MESHIFY_PASSWORD")

SQL_SERVER = getenv("HP_SQL_SERVER")
SQL_USER = getenv("HP_SQL_USER")
SQL_PASSWORD = getenv("HP_SQL_PASSWORD")

SQL_DB = "POCCLoud"
SQL_TABLE = "Production"


def main(test_mode=False):
    """Pull yesterday's volume for every Henry Petroleum abbflow device from
    the Meshify API and insert one row per device into the Production table.

    :param bool test_mode: when True, print the rows instead of touching the DB.
    """
    if not MESHIFY_USERNAME or not MESHIFY_PASSWORD:
        print("Be sure to set the meshify username and password as environment variables MESHIFY_USERNAME and MESHIFY_PASSWORD")
        exit()

    if (not SQL_SERVER or not SQL_USER or not SQL_PASSWORD) and not test_mode:
        print("Be sure to set the SQL Server, username, and password as environment variables HP_SQL_SERVER, HP_SQL_USER, and HP_SQL_PASSWORD")
        exit()

    # pymssql is only needed for a real run; importing it lazily lets test
    # mode work on machines without the driver installed.
    if not test_mode:
        import pymssql

    devicetypes = meshify.query_meshify_api("devicetypes")
    companies = meshify.query_meshify_api("companies")
    henrypetroleum_company = meshify.find_by_name("Henry Petroleum", companies)
    devices = meshify.query_meshify_api("devices")
    gateways = meshify.query_meshify_api("gateways")

    # Restrict to abbflow devices owned by Henry Petroleum.
    abbflow_devicetype = meshify.find_by_name("abbflow", devicetypes)
    abbflow_devices = list(filter(lambda x: x['deviceTypeId'] == abbflow_devicetype['id'] and x['companyId'] == henrypetroleum_company['id'], devices))
    abbflowchannels = meshify.query_meshify_api("devicetypes/{}/channels".format(abbflow_devicetype['id']))

    abbflow_yesterdaytotal_channel = meshify.find_by_name("yesterday_volume", abbflowchannels)

    query_params = []
    for abbflow_dev in abbflow_devices:
        abbflowdevvalues = meshify.query_meshify_api("devices/{}/values".format(abbflow_dev['id']))
        try:
            yest_volume = float(abbflowdevvalues['yesterday_volume']['value'])
            gateway_id = abbflow_dev['gatewayId']
            unix_ts = float(abbflowdevvalues['yesterday_volume']['timestamp'])
            local_time = datetime.utcfromtimestamp(unix_ts)
            # Stamp the reading at 00:01 of its own day: the value is a
            # daily total, not a point-in-time sample.
            midn_time = datetime(local_time.year, local_time.month, local_time.day, 0, 1)
            query_params.append((gateway_id, yest_volume, midn_time))
        except (ValueError, KeyError):
            # Non-numeric value/timestamp, or a device that has never
            # reported yesterday_volume -> skip it instead of crashing.
            pass

    if not test_mode:
        conn = pymssql.connect(SQL_SERVER, SQL_USER, SQL_PASSWORD, SQL_DB)
        cursor = conn.cursor()
        # NOTE(review): %d with a float volume looks suspect; pymssql
        # generally expects %s/%d paramstyle with matching types -- confirm
        # against the driver docs before changing.
        cursor.executemany("INSERT INTO Production (well_id, yesterday_production, timestamp) VALUES (%d, %d, %s)", query_params)
        conn.commit()

        # Echo the table so the cron log shows what was written.
        cursor.execute("SELECT * FROM Production")
        print("Fetching from db at {}".format(datetime.now()))
        row = cursor.fetchone()
        while row:
            print(row)
            row = cursor.fetchone()
        print("==============")
        conn.close()
    else:
        print("Fake Fetching from db at {}".format(datetime.now()))
        for q in query_params:
            print(q)
        print("==============")


if __name__ == '__main__':
    test_mode = False
    if len(argv) > 1:
        if argv[1].lower() == "true":
            test_mode = True
    main(test_mode=test_mode)
|
||||
@@ -1,38 +0,0 @@
|
||||
"""Read a CSV file of channels and post them to Meshify via the API."""
|
||||
|
||||
import csv
|
||||
import meshify
|
||||
import sys
|
||||
|
||||
|
||||
def main(csv_file, devicetype):
|
||||
"""Main function."""
|
||||
csvfile = open(csv_file, 'rU')
|
||||
reader = csv.DictReader(csvfile, dialect=csv.excel)
|
||||
|
||||
channels = []
|
||||
idx = 0
|
||||
for x in reader:
|
||||
channels.append(x)
|
||||
channels[idx]["fromMe"] = False
|
||||
channels[idx]["regex"] = ""
|
||||
channels[idx]["regexErrMsg"] = ""
|
||||
channels[idx]["dataType"] = int(channels[idx]["dataType"])
|
||||
channels[idx]["deviceTypeId"] = int(channels[idx]["deviceTypeId"])
|
||||
channels[idx]["channelType"] = int(channels[idx]["channelType"])
|
||||
channels[idx]["io"] = bool(channels[idx]["io"])
|
||||
idx += 1
|
||||
|
||||
try:
|
||||
this_devicetype = meshify.find_by_name(devicetype, meshify.query_meshify_api("devicetypes"))
|
||||
for c in channels:
|
||||
print(meshify.post_meshify_api("devicetypes/{}/channels".format(this_devicetype['id']), c))
|
||||
except KeyError:
|
||||
print("Could not find key {}".format(devicetype))
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
if len(sys.argv) == 3:
|
||||
main(sys.argv[1], sys.argv[2])
|
||||
else:
|
||||
print("Syntax is python postChannels.py <filepath.csv> <devicetype name>")
|
||||
@@ -162,8 +162,7 @@ def prep_emails(device_type, channel_config_list, to_list):
|
||||
s3.Object(BUCKET_NAME, 'created_reports/{}_{}_{}.xlsx'.format(device_type, comp, filename_datestring)).put(Body=open('/tmp/{}_{}_{}.xlsx'.format(device_type, comp, filename_datestring), 'rb'))
|
||||
|
||||
try:
|
||||
# email_to = to_list[comp]
|
||||
email_to = ["pmcdonagh@henry-pump.com"]
|
||||
email_to = to_list[comp]
|
||||
except KeyError:
|
||||
logger.error("No recipients for that company({})!".format(comp))
|
||||
continue
|
||||
@@ -199,4 +198,4 @@ def lambda_handler(event, context):
|
||||
emails = prep_emails(device_type_list[i], channel_configs[i], to_lists[i])
|
||||
for email in emails:
|
||||
send_ses_email(email)
|
||||
logger.info("Sent email for {} to {}".format(device_type_list[i], ", ".join(email['To'])))
|
||||
logger.info("Sent email for {} to {}".format(device_type_list[i], email['To']))
|
||||
|
||||
@@ -182,7 +182,7 @@ if __name__ == '__main__':
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument('deviceType', help="Meshify device type")
|
||||
parser.add_argument('-s', '--send', action='store_true', help="Send emails to everyone in the _to.json file")
|
||||
parser.add_argument('-c', '--config-path', default=".", help="The folder path that holds the configuration files")
|
||||
parser.add_argument('-c', '--config-path', default="channel_config", help="The folder path that holds the configuration files")
|
||||
parser.add_argument('-o', '--output-path', default="files", help="The folder path that holds the output files")
|
||||
|
||||
args = parser.parse_args()
|
||||
|
||||
Reference in New Issue
Block a user