diff --git a/EKKO Reports/thunderbirdfs-daily-report/~$ACW Daily Report Template-test.xlsx b/EKKO Reports/thunderbirdfs-daily-report/~$ACW Daily Report Template-test.xlsx deleted file mode 100644 index df8ca57..0000000 Binary files a/EKKO Reports/thunderbirdfs-daily-report/~$ACW Daily Report Template-test.xlsx and /dev/null differ diff --git a/Report Generator/RRig Monthly/.aws-sam/build.toml b/Report Generator/RRig Monthly/.aws-sam/build.toml new file mode 100644 index 0000000..e03d804 --- /dev/null +++ b/Report Generator/RRig Monthly/.aws-sam/build.toml @@ -0,0 +1,13 @@ +# This file is auto generated by SAM CLI build command + +[function_build_definitions.c89700f7-74ed-4d1b-a970-c398fad05879] +codeuri = "/Users/nico/Documents/GitHub/ThingsBoard/Report Generator/RRig Monthly/rrigreport" +runtime = "python3.12" +architecture = "arm64" +handler = "rrigreport.lambda_handler" +source_hash = "f1515dff698a5f587a1a00eecd520ea833bfb205fefc2d454278970b4f1bd138" +manifest_hash = "" +packagetype = "Zip" +functions = ["RRigReport"] + +[layer_build_definitions] diff --git a/Report Generator/RRig Monthly/.aws-sam/build/RRigReport/__init__.py b/Report Generator/RRig Monthly/.aws-sam/build/RRigReport/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/Report Generator/RRig Monthly/.aws-sam/build/RRigReport/app.py b/Report Generator/RRig Monthly/.aws-sam/build/RRigReport/app.py new file mode 100644 index 0000000..0930620 --- /dev/null +++ b/Report Generator/RRig Monthly/.aws-sam/build/RRigReport/app.py @@ -0,0 +1,42 @@ +import json + +# import requests + + +def lambda_handler(event, context): + """Sample pure Lambda function + + Parameters + ---------- + event: dict, required + API Gateway Lambda Proxy Input Format + + Event doc: https://docs.aws.amazon.com/apigateway/latest/developerguide/set-up-lambda-proxy-integrations.html#api-gateway-simple-proxy-for-lambda-input-format + + context: object, required + Lambda Context runtime methods and attributes + + Context doc: https://docs.aws.amazon.com/lambda/latest/dg/python-context-object.html + + Returns + ------ + API Gateway Lambda Proxy Output Format: dict + + Return doc: https://docs.aws.amazon.com/apigateway/latest/developerguide/set-up-lambda-proxy-integrations.html + """ + + # try: + # ip = requests.get("http://checkip.amazonaws.com/") + # except requests.RequestException as e: + # # Send some context about this error to Lambda Logs + # print(e) + + # raise e + + return { + "statusCode": 200, + "body": json.dumps({ + "message": "hello world", + # "location": ip.text.replace("\n", "") + }), + } diff --git a/Report Generator/RRig Monthly/config.json b/Report Generator/RRig Monthly/.aws-sam/build/RRigReport/config.json similarity index 100% rename from Report Generator/RRig Monthly/config.json rename to Report Generator/RRig Monthly/.aws-sam/build/RRigReport/config.json diff --git a/Report Generator/RRig Monthly/.aws-sam/build/RRigReport/handler.py b/Report Generator/RRig Monthly/.aws-sam/build/RRigReport/handler.py new file mode 100644 index 0000000..2bb3a8d --- /dev/null +++ b/Report Generator/RRig Monthly/.aws-sam/build/RRigReport/handler.py @@ -0,0 +1,6 @@ +import json +def handler(event, context): + # Log the event argument for debugging and for use in local development. 
+ print(json.dumps(event)) + + return {} \ No newline at end of file diff --git a/Report Generator/RRig Monthly/.aws-sam/build/RRigReport/requirements.txt b/Report Generator/RRig Monthly/.aws-sam/build/RRigReport/requirements.txt new file mode 100644 index 0000000..e69de29 diff --git a/Report Generator/RRig Monthly/.aws-sam/build/RRigReport/rrigreport.py b/Report Generator/RRig Monthly/.aws-sam/build/RRigReport/rrigreport.py new file mode 100644 index 0000000..5b1db90 --- /dev/null +++ b/Report Generator/RRig Monthly/.aws-sam/build/RRigReport/rrigreport.py @@ -0,0 +1,217 @@ +from tb_rest_client.rest_client_pe import * +from tb_rest_client.rest import ApiException +import json, xlsxwriter, boto3, os, time, pytz, sys +from threading import Lock +from datetime import datetime as dt +from datetime import timedelta as td +from email.mime.multipart import MIMEMultipart +from email.mime.text import MIMEText +from email import encoders +from email.mime.base import MIMEBase + + +# Define a rate limiter class +class RateLimiter: + def __init__(self, max_calls, period): + self.max_calls = max_calls + self.period = period + self.call_times = [] + self.lock = Lock() + + def acquire(self): + with self.lock: + current_time = time.time() + # Remove expired calls + self.call_times = [t for t in self.call_times if t > current_time - self.period] + if len(self.call_times) >= self.max_calls: + # Wait for the oldest call to expire + time_to_wait = self.period - (current_time - self.call_times[0]) + time.sleep(time_to_wait) + # Register the current call + self.call_times.append(time.time()) + +# Initialize a rate limiter +rate_limiter = RateLimiter(max_calls=10, period=1) # Adjust `max_calls` and `period` as needed + +def sort_dict_keys(d): + """Sorts the keys of all nested dictionaries in a given dictionary. + + Args: + d: The input dictionary. + + Returns: + A new dictionary with sorted keys at each level. 
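+
+ Example (illustrative):
+ >>> sort_dict_keys({"b": {"y": 2, "x": 1}, "a": 0})
+ {'a': 0, 'b': {'x': 1, 'y': 2}}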
+ """ + sorted_d = {} + for k, v in d.items(): + if isinstance(v, dict): + sorted_d[k] = sort_dict_keys(v) + else: + sorted_d[k] = v + return dict(sorted(sorted_d.items())) + + +def lambda_handler(event, context): + # Creating Rest Client for ThingsBoard + with RestClientPE(base_url="https://hp.henrypump.cloud") as rest_client: + try: + rest_client.login(username=os.environ["username"], password=os.environ["password"]) + # Loading Config from file + with open("./config.json") as f: + config = json.load(f) + reportData = {} + reportToList = {} + + # Loop through each item in config, each item represents a report + for report in config: + reportToList[report["name"]] = report["emails"] + for customer in report["customers"].keys(): + # Apply rate limiting for API calls + rate_limiter.acquire() + devices = rest_client.get_customer_devices(customer_id=customer, page=0, page_size=1000) + + if report["filterDevicesIn"]: + devices.data = [device for device in devices.data if device.id.id in report["filterDevicesIn"]] + if report["filterDevicesOut"]: + devices.data = [device for device in devices.data if device.id.id not in report["filterDevicesOut"]] + + if not reportData.get(report["name"], None): + reportData[report["name"]] = {} + + for device in devices.data: + for deviceType in report["customers"][customer]["deviceTypes"]: + if device.type == deviceType["deviceType"]: + rate_limiter.acquire() + keys = rest_client.get_timeseries_keys_v1(device.id) + keys = list(filter(lambda x: x in deviceType["dataPoints"], keys)) + #Check for report customer + if not reportData[report["name"]].get(report["customers"][customer]["name"], None): + reportData[report["name"]][report["customers"][customer]["name"]] = {} + #Check for device type in config + if device.type in list(map(lambda x: x["deviceType"], report["customers"][customer]["deviceTypes"])): + + if keys: + rate_limiter.acquire() + # Define the Chicago timezone + chicago_tz = pytz.timezone("America/Chicago") + + # Get today's date in Chicago timezone + today = dt.now(chicago_tz) + + # Get the first day of the current month + first_day_of_this_month = today.replace(day=1,hour=0,minute=9) + + # Calculate the first day of the previous month + last_day_of_last_month = first_day_of_this_month - td(days=1) + first_day_of_previous_month = last_day_of_last_month.replace(day=2) + + # Convert to timestamp in milliseconds + first_day_of_previous_month_timestamp_ms = int(first_day_of_previous_month.timestamp() * 1000) + first_day_of_this_month_timestamp_ms = int(first_day_of_this_month.timestamp() * 1000) + deviceData = rest_client.get_timeseries(entity_id=device.id, keys=",".join(keys),start_ts=first_day_of_previous_month_timestamp_ms, end_ts=first_day_of_this_month_timestamp_ms, agg=None, limit=5000, order_by="ASC") + + # Group data by day and keep the first point per day + filtered_data = {} + deviceDataFiltered = {} + for key, v in deviceData.items(): + for value in v: + # Convert timestamp to datetime in Chicago timezone + dttz = dt.fromtimestamp(value["ts"] / 1000, tz=chicago_tz) + + # Get the calendar day (date part) + day = dttz.date() + + # Keep only the first data point for each day + if day not in filtered_data: + filtered_data[day] = value + # Extract the filtered results + deviceDataFiltered[key] = list(filtered_data.values()) + for x in report["customers"][customer]["deviceTypes"]: + if x["deviceType"] == device.type: + labels = x["labels"] + labelled_data = {} + for k,v in labels.items(): + labelled_data[v] = {} + for k,v in 
deviceDataFiltered.items(): + labelled_data[labels[k]] = v + reportData[report["name"]][report["customers"][customer]["name"]][device.name] = labelled_data + else: + reportData[report["name"]][report["customers"][customer]["name"]][device.name] = {} + + #Sort Data + reportDataSorted = sort_dict_keys(reportData) + + #print(json.dumps(reportDataSorted,indent=4)) + except ApiException as e: + print(f"API Exception: {e}") + except Exception as e: + print(f"Other Exception in getting data:\n{e}") + print(type(e)) + + + # Create an AWS SES client + ses_client = boto3.client('ses', region_name='us-east-1') + s3 = boto3.resource('s3') + BUCKET_NAME = "thingsboard-email-reports" + # Create a workbook for each report + for report_name, report_data in reportDataSorted.items(): + #will generate an email lower down + spreadsheets = [] + # Create a worksheet for each company + for company_name, device_data in report_data.items(): + workbook = xlsxwriter.Workbook(f"/tmp/{report_name}-{company_name}-{dt.today().strftime('%Y-%m-%d')}.xlsx",{'strings_to_numbers': True}) + bold = workbook.add_format({'bold': True}) + # Create a sheet for each device type + #for device_type, device_data in company_data.items(): + worksheet = workbook.add_worksheet("Totals") + + # Set the header column with device types + device_names = list(device_data.keys()) + #worksheet.write_column(1, 0, device_names,bold) + worksheet.write_row(0, 1, device_names,bold) + # Write the data to the sheet + for i, (telemetry_name, telemetry_data) in enumerate(device_data.items()): + # Set the header row with telemetry names + telemetry_names = []#list(telemetry_data.keys()) + for _, e in telemetry_data.items(): + for entry in e: + telemetry_names.append(str(dt.fromtimestamp(entry["ts"] / 1000, tz=chicago_tz).date() - td(days=1))) + + #telemetry_names = [dt.fromtimestamp(entry["ts"] / 1000, tz=chicago_tz).date() for _, entry in telemetry_data.items()] + #worksheet.write_row(0, 1, telemetry_names, bold) + worksheet.write_column(1, 0, telemetry_names, bold) + for j, (data_name, data) in enumerate(telemetry_data.items()): + values = [d["value"] for d in data] + #worksheet.write_row(i + 1, j+ 1, values) + worksheet.write_column( j+ 1,i + 1, values) + worksheet.autofit() + workbook.close() + spreadsheets.append(workbook) + + # Store the generated report in S3. + s3.Object(BUCKET_NAME, f'{report_name}-{company_name}-{dt.today().strftime('%Y-%m-%d')}.xlsx').put(Body=open(f"/tmp/{report_name}-{company_name}-{dt.today().strftime('%Y-%m-%d')}.xlsx", 'rb')) + if reportToList[report_name]: + # Create an email message + msg = MIMEMultipart() + msg['Subject'] = report_name + msg['From'] = 'alerts@henry-pump.com' + msg['To'] = ", ".join(reportToList[report_name]) + + # Add a text body to the message (optional) + body_text = 'Please find the attached spreadsheets.' 
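+ # Note: SES SendEmail does not support file attachments, so the message is
+ # assembled as a raw MIME multipart payload and sent via send_raw_email below.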
+ msg.attach(MIMEText(body_text, 'plain')) + + # Attach each workbook in the spreadsheets array + for spreadsheet in spreadsheets: + # Attach the file to the email message + attachment = MIMEBase('application', 'octet-stream') + attachment.set_payload(open(spreadsheet.filename, "rb").read()) + encoders.encode_base64(attachment) + attachment.add_header('Content-Disposition', 'attachment', filename=spreadsheet.filename[5:]) + + msg.attach(attachment) + # Send the email using AWS SES + response = ses_client.send_raw_email( + + RawMessage={'Data': msg.as_string()} + ) \ No newline at end of file diff --git a/Report Generator/RRig Monthly/.aws-sam/build/template.yaml b/Report Generator/RRig Monthly/.aws-sam/build/template.yaml new file mode 100644 index 0000000..cc0165a --- /dev/null +++ b/Report Generator/RRig Monthly/.aws-sam/build/template.yaml @@ -0,0 +1,81 @@ +AWSTemplateFormatVersion: '2010-09-09' +Transform: AWS::Serverless-2016-10-31 +Description: 'RRig Monthly Totals Report + + ' +Globals: + Function: + Timeout: 3 +Resources: + RRigReport: + Type: AWS::Serverless::Function + Properties: + MemorySize: 128 + Timeout: 300 + Environment: + Variables: + username: henry.pump.automation@gmail.com + password: Henry Pump @ 2022 + RRIGREPORTBUCKET_BUCKET_NAME: + Fn::ImportValue: TBReportBucket + Architectures: + - arm64 + CodeUri: RRigReport + Runtime: python3.12 + Handler: rrigreport.lambda_handler + Policies: + - AmazonSESFullAccess + - Statement: + - Effect: Allow + Action: + - s3:PutObject + Resource: + - Fn::Sub: arn:${AWS::Partition}:s3:::!ImportValue TBReportBucket + - Fn::Sub: arn:${AWS::Partition}:s3:::!ImportValue TBReportBucket/* + Layers: + - Fn::ImportValue: TBReportLayer + Metadata: + SamResourceId: RRigReport + RRigSchedule: + Type: AWS::Scheduler::Schedule + Properties: + ScheduleExpression: cron(5 7 1 * ? 
*) + FlexibleTimeWindow: + Mode: 'OFF' + ScheduleExpressionTimezone: America/Chicago + Target: + Arn: + Fn::GetAtt: + - RRigReport + - Arn + RoleArn: + Fn::GetAtt: + - RRigScheduleToRRigReportRole + - Arn + RRigScheduleToRRigReportRole: + Type: AWS::IAM::Role + Properties: + AssumeRolePolicyDocument: + Version: '2012-10-17' + Statement: + Effect: Allow + Principal: + Service: + Fn::Sub: scheduler.${AWS::URLSuffix} + Action: sts:AssumeRole + Policies: + - PolicyName: StartExecutionPolicy + PolicyDocument: + Version: '2012-10-17' + Statement: + - Effect: Allow + Action: + - lambda:InvokeFunction + - s3:GetObject + - s3:PutObject + Resource: + - Fn::GetAtt: + - RRigReport + - Arn + - Fn::Sub: arn:${AWS::Partition}:s3:::!ImportValue TBReportBucket + - Fn::Sub: arn:${AWS::Partition}:s3:::!ImportValue TBReportBucket/* diff --git a/Report Generator/RRig Monthly/.aws-sam/cache/c89700f7-74ed-4d1b-a970-c398fad05879/__init__.py b/Report Generator/RRig Monthly/.aws-sam/cache/c89700f7-74ed-4d1b-a970-c398fad05879/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/Report Generator/RRig Monthly/.aws-sam/cache/c89700f7-74ed-4d1b-a970-c398fad05879/app.py b/Report Generator/RRig Monthly/.aws-sam/cache/c89700f7-74ed-4d1b-a970-c398fad05879/app.py new file mode 100644 index 0000000..0930620 --- /dev/null +++ b/Report Generator/RRig Monthly/.aws-sam/cache/c89700f7-74ed-4d1b-a970-c398fad05879/app.py @@ -0,0 +1,42 @@ +import json + +# import requests + + +def lambda_handler(event, context): + """Sample pure Lambda function + + Parameters + ---------- + event: dict, required + API Gateway Lambda Proxy Input Format + + Event doc: https://docs.aws.amazon.com/apigateway/latest/developerguide/set-up-lambda-proxy-integrations.html#api-gateway-simple-proxy-for-lambda-input-format + + context: object, required + Lambda Context runtime methods and attributes + + Context doc: https://docs.aws.amazon.com/lambda/latest/dg/python-context-object.html + + Returns + ------ + API Gateway Lambda Proxy Output Format: dict + + Return doc: https://docs.aws.amazon.com/apigateway/latest/developerguide/set-up-lambda-proxy-integrations.html + """ + + # try: + # ip = requests.get("http://checkip.amazonaws.com/") + # except requests.RequestException as e: + # # Send some context about this error to Lambda Logs + # print(e) + + # raise e + + return { + "statusCode": 200, + "body": json.dumps({ + "message": "hello world", + # "location": ip.text.replace("\n", "") + }), + } diff --git a/Report Generator/RRig Monthly/.aws-sam/cache/c89700f7-74ed-4d1b-a970-c398fad05879/config.json b/Report Generator/RRig Monthly/.aws-sam/cache/c89700f7-74ed-4d1b-a970-c398fad05879/config.json new file mode 100644 index 0000000..258614a --- /dev/null +++ b/Report Generator/RRig Monthly/.aws-sam/cache/c89700f7-74ed-4d1b-a970-c398fad05879/config.json @@ -0,0 +1,35 @@ +[ + { + "emails": [ + "nmelone@henry-pump.com" + ], + "customers": { + "4d4058a0-b25d-11ef-861c-8dbe77c636e1": { + "name": "RRig Energy", + "deviceTypes": [ + { + "deviceType": "rr_facility", + "dataPoints": [ + "tp_yesterday_total" + ], + "labels":{ + "tp_yesterday_total": "Today Total" + } + }, + { + "deviceType": "rr_pipeline", + "dataPoints": [ + "fm_1_yesterday_total" + ], + "labels":{ + "fm_1_yesterday_total": "Today Total" + } + } + ] + } + }, + "filterDevicesIn": [], + "filterDevicesOut": ["9fd75630-c924-11ef-8af2-917752fdfd64","55dd5960-c949-11ef-a206-4f9798eaa111", "21cebee0-bcb3-11ef-82e8-957b60666c9e"], + "name": "RRig-Eneryg-Monthly-Report" + } +] \ No newline at end of 
file diff --git a/Report Generator/RRig Monthly/.aws-sam/cache/c89700f7-74ed-4d1b-a970-c398fad05879/handler.py b/Report Generator/RRig Monthly/.aws-sam/cache/c89700f7-74ed-4d1b-a970-c398fad05879/handler.py new file mode 100644 index 0000000..2bb3a8d --- /dev/null +++ b/Report Generator/RRig Monthly/.aws-sam/cache/c89700f7-74ed-4d1b-a970-c398fad05879/handler.py @@ -0,0 +1,6 @@ +import json +def handler(event, context): + # Log the event argument for debugging and for use in local development. + print(json.dumps(event)) + + return {} \ No newline at end of file diff --git a/Report Generator/RRig Monthly/.aws-sam/cache/c89700f7-74ed-4d1b-a970-c398fad05879/requirements.txt b/Report Generator/RRig Monthly/.aws-sam/cache/c89700f7-74ed-4d1b-a970-c398fad05879/requirements.txt new file mode 100644 index 0000000..e69de29 diff --git a/Report Generator/RRig Monthly/.aws-sam/cache/c89700f7-74ed-4d1b-a970-c398fad05879/rrigreport.py b/Report Generator/RRig Monthly/.aws-sam/cache/c89700f7-74ed-4d1b-a970-c398fad05879/rrigreport.py new file mode 100644 index 0000000..5b1db90 --- /dev/null +++ b/Report Generator/RRig Monthly/.aws-sam/cache/c89700f7-74ed-4d1b-a970-c398fad05879/rrigreport.py @@ -0,0 +1,217 @@ +from tb_rest_client.rest_client_pe import * +from tb_rest_client.rest import ApiException +import json, xlsxwriter, boto3, os, time, pytz, sys +from threading import Lock +from datetime import datetime as dt +from datetime import timedelta as td +from email.mime.multipart import MIMEMultipart +from email.mime.text import MIMEText +from email import encoders +from email.mime.base import MIMEBase + + +# Define a rate limiter class +class RateLimiter: + def __init__(self, max_calls, period): + self.max_calls = max_calls + self.period = period + self.call_times = [] + self.lock = Lock() + + def acquire(self): + with self.lock: + current_time = time.time() + # Remove expired calls + self.call_times = [t for t in self.call_times if t > current_time - self.period] + if len(self.call_times) >= self.max_calls: + # Wait for the oldest call to expire + time_to_wait = self.period - (current_time - self.call_times[0]) + time.sleep(time_to_wait) + # Register the current call + self.call_times.append(time.time()) + +# Initialize a rate limiter +rate_limiter = RateLimiter(max_calls=10, period=1) # Adjust `max_calls` and `period` as needed + +def sort_dict_keys(d): + """Sorts the keys of all nested dictionaries in a given dictionary. + + Args: + d: The input dictionary. + + Returns: + A new dictionary with sorted keys at each level. 
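+
+ Example (illustrative):
+ >>> sort_dict_keys({"b": {"y": 2, "x": 1}, "a": 0})
+ {'a': 0, 'b': {'x': 1, 'y': 2}}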
+ """ + sorted_d = {} + for k, v in d.items(): + if isinstance(v, dict): + sorted_d[k] = sort_dict_keys(v) + else: + sorted_d[k] = v + return dict(sorted(sorted_d.items())) + + +def lambda_handler(event, context): + # Creating Rest Client for ThingsBoard + with RestClientPE(base_url="https://hp.henrypump.cloud") as rest_client: + try: + rest_client.login(username=os.environ["username"], password=os.environ["password"]) + # Loading Config from file + with open("./config.json") as f: + config = json.load(f) + reportData = {} + reportToList = {} + + # Loop through each item in config, each item represents a report + for report in config: + reportToList[report["name"]] = report["emails"] + for customer in report["customers"].keys(): + # Apply rate limiting for API calls + rate_limiter.acquire() + devices = rest_client.get_customer_devices(customer_id=customer, page=0, page_size=1000) + + if report["filterDevicesIn"]: + devices.data = [device for device in devices.data if device.id.id in report["filterDevicesIn"]] + if report["filterDevicesOut"]: + devices.data = [device for device in devices.data if device.id.id not in report["filterDevicesOut"]] + + if not reportData.get(report["name"], None): + reportData[report["name"]] = {} + + for device in devices.data: + for deviceType in report["customers"][customer]["deviceTypes"]: + if device.type == deviceType["deviceType"]: + rate_limiter.acquire() + keys = rest_client.get_timeseries_keys_v1(device.id) + keys = list(filter(lambda x: x in deviceType["dataPoints"], keys)) + #Check for report customer + if not reportData[report["name"]].get(report["customers"][customer]["name"], None): + reportData[report["name"]][report["customers"][customer]["name"]] = {} + #Check for device type in config + if device.type in list(map(lambda x: x["deviceType"], report["customers"][customer]["deviceTypes"])): + + if keys: + rate_limiter.acquire() + # Define the Chicago timezone + chicago_tz = pytz.timezone("America/Chicago") + + # Get today's date in Chicago timezone + today = dt.now(chicago_tz) + + # Get the first day of the current month + first_day_of_this_month = today.replace(day=1,hour=0,minute=9) + + # Calculate the first day of the previous month + last_day_of_last_month = first_day_of_this_month - td(days=1) + first_day_of_previous_month = last_day_of_last_month.replace(day=2) + + # Convert to timestamp in milliseconds + first_day_of_previous_month_timestamp_ms = int(first_day_of_previous_month.timestamp() * 1000) + first_day_of_this_month_timestamp_ms = int(first_day_of_this_month.timestamp() * 1000) + deviceData = rest_client.get_timeseries(entity_id=device.id, keys=",".join(keys),start_ts=first_day_of_previous_month_timestamp_ms, end_ts=first_day_of_this_month_timestamp_ms, agg=None, limit=5000, order_by="ASC") + + # Group data by day and keep the first point per day + filtered_data = {} + deviceDataFiltered = {} + for key, v in deviceData.items(): + for value in v: + # Convert timestamp to datetime in Chicago timezone + dttz = dt.fromtimestamp(value["ts"] / 1000, tz=chicago_tz) + + # Get the calendar day (date part) + day = dttz.date() + + # Keep only the first data point for each day + if day not in filtered_data: + filtered_data[day] = value + # Extract the filtered results + deviceDataFiltered[key] = list(filtered_data.values()) + for x in report["customers"][customer]["deviceTypes"]: + if x["deviceType"] == device.type: + labels = x["labels"] + labelled_data = {} + for k,v in labels.items(): + labelled_data[v] = {} + for k,v in 
deviceDataFiltered.items(): + labelled_data[labels[k]] = v + reportData[report["name"]][report["customers"][customer]["name"]][device.name] = labelled_data + else: + reportData[report["name"]][report["customers"][customer]["name"]][device.name] = {} + + #Sort Data + reportDataSorted = sort_dict_keys(reportData) + + #print(json.dumps(reportDataSorted,indent=4)) + except ApiException as e: + print(f"API Exception: {e}") + except Exception as e: + print(f"Other Exception in getting data:\n{e}") + print(type(e)) + + + # Create an AWS SES client + ses_client = boto3.client('ses', region_name='us-east-1') + s3 = boto3.resource('s3') + BUCKET_NAME = "thingsboard-email-reports" + # Create a workbook for each report + for report_name, report_data in reportDataSorted.items(): + #will generate an email lower down + spreadsheets = [] + # Create a worksheet for each company + for company_name, device_data in report_data.items(): + workbook = xlsxwriter.Workbook(f"/tmp/{report_name}-{company_name}-{dt.today().strftime('%Y-%m-%d')}.xlsx",{'strings_to_numbers': True}) + bold = workbook.add_format({'bold': True}) + # Create a sheet for each device type + #for device_type, device_data in company_data.items(): + worksheet = workbook.add_worksheet("Totals") + + # Set the header column with device types + device_names = list(device_data.keys()) + #worksheet.write_column(1, 0, device_names,bold) + worksheet.write_row(0, 1, device_names,bold) + # Write the data to the sheet + for i, (telemetry_name, telemetry_data) in enumerate(device_data.items()): + # Set the header row with telemetry names + telemetry_names = []#list(telemetry_data.keys()) + for _, e in telemetry_data.items(): + for entry in e: + telemetry_names.append(str(dt.fromtimestamp(entry["ts"] / 1000, tz=chicago_tz).date() - td(days=1))) + + #telemetry_names = [dt.fromtimestamp(entry["ts"] / 1000, tz=chicago_tz).date() for _, entry in telemetry_data.items()] + #worksheet.write_row(0, 1, telemetry_names, bold) + worksheet.write_column(1, 0, telemetry_names, bold) + for j, (data_name, data) in enumerate(telemetry_data.items()): + values = [d["value"] for d in data] + #worksheet.write_row(i + 1, j+ 1, values) + worksheet.write_column( j+ 1,i + 1, values) + worksheet.autofit() + workbook.close() + spreadsheets.append(workbook) + + # Store the generated report in S3. + s3.Object(BUCKET_NAME, f'{report_name}-{company_name}-{dt.today().strftime('%Y-%m-%d')}.xlsx').put(Body=open(f"/tmp/{report_name}-{company_name}-{dt.today().strftime('%Y-%m-%d')}.xlsx", 'rb')) + if reportToList[report_name]: + # Create an email message + msg = MIMEMultipart() + msg['Subject'] = report_name + msg['From'] = 'alerts@henry-pump.com' + msg['To'] = ", ".join(reportToList[report_name]) + + # Add a text body to the message (optional) + body_text = 'Please find the attached spreadsheets.' 
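+ # Note: SES SendEmail does not support file attachments, so the message is
+ # assembled as a raw MIME multipart payload and sent via send_raw_email below.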
+ msg.attach(MIMEText(body_text, 'plain')) + + # Attach each workbook in the spreadsheets array + for spreadsheet in spreadsheets: + # Attach the file to the email message + attachment = MIMEBase('application', 'octet-stream') + attachment.set_payload(open(spreadsheet.filename, "rb").read()) + encoders.encode_base64(attachment) + attachment.add_header('Content-Disposition', 'attachment', filename=spreadsheet.filename[5:]) + + msg.attach(attachment) + # Send the email using AWS SES + response = ses_client.send_raw_email( + + RawMessage={'Data': msg.as_string()} + ) \ No newline at end of file diff --git a/Report Generator/RRig Monthly/README.TOOLKIT.md b/Report Generator/RRig Monthly/README.TOOLKIT.md new file mode 100644 index 0000000..a504283 --- /dev/null +++ b/Report Generator/RRig Monthly/README.TOOLKIT.md @@ -0,0 +1,36 @@ +# Developing AWS SAM Applications with the AWS Toolkit For Visual Studio Code + +This project contains source code and supporting files for a serverless application that you can locally run, debug, and deploy to AWS with the AWS Toolkit For Visual Studio Code. + +A "SAM" (serverless application model) project is a project that contains a template.yaml file which is understood by AWS tooling (such as SAM CLI, and the AWS Toolkit For Visual Studio Code). + +## Writing and Debugging Serverless Applications + +The code for this application will differ based on the runtime, but the path to a handler can be found in the [`template.yaml`](./template.yaml) file through a resource's `CodeUri` and `Handler` fields. + +AWS Toolkit For Visual Studio Code supports local debugging for serverless applications through VS Code's debugger. Since this application was created by the AWS Toolkit, launch configurations for all included handlers have been generated and can be found in the menu next to the Run button: + + +You can debug the Lambda handlers locally by adding a breakpoint to the source file, then running the launch configuration. This works by using Docker on your local machine. + +Invocation parameters, including payloads and request parameters, can be edited either by the `Edit SAM Debug Configuration` command (through the Command Palette or CodeLens) or by editing the `launch.json` file. + +AWS Lambda functions not defined in the [`template.yaml`](./template.yaml) file can be invoked and debugged by creating a launch configuration through the CodeLens over the function declaration, or with the `Add SAM Debug Configuration` command. + +## Deploying Serverless Applications + +You can deploy a serverless application by invoking the `AWS: Deploy SAM application` command through the Command Palette or by right-clicking the Lambda node in the AWS Explorer and entering the deployment region, a valid S3 bucket from the region, and the name of a CloudFormation stack to deploy to. You can monitor your deployment's progress through the `AWS Toolkit` Output Channel. + +## Interacting With Deployed Serverless Applications + +A successfully-deployed serverless application can be found in the AWS Explorer under region and CloudFormation node that the serverless application was deployed to. + +In the AWS Explorer, you can invoke _remote_ AWS Lambda Functions by right-clicking the Lambda node and selecting "Invoke on AWS". 
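You can also invoke the deployed function from a script. A minimal sketch using boto3 (the physical function name, region, and credentials are assumptions here, not something the toolkit provides):

```python
import json
import boto3

# Invoke the deployed Lambda synchronously and print its response.
# "RRigReport" is a placeholder; substitute the physical function name from your stack.
client = boto3.client("lambda", region_name="us-east-1")
response = client.invoke(
    FunctionName="RRigReport",
    InvocationType="RequestResponse",  # use "Event" for an asynchronous invoke
    Payload=json.dumps({}).encode("utf-8"),
)
print(response["StatusCode"], json.loads(response["Payload"].read()))
```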
+ +Similarly, if the Function declaration contained an API Gateway event, the API Gateway API can be found in the API Gateway node under the region node the serverless application was deployed to, and can be invoked via right-clicking the API node and selecting "Invoke on AWS". + +## Resources + +General information about this SAM project can be found in the [`README.md`](./README.md) file in this folder. + +More information about using the AWS Toolkit For Visual Studio Code with serverless applications can be found [in the AWS documentation](https://docs.aws.amazon.com/toolkit-for-vscode/latest/userguide/serverless-apps.html) . diff --git a/Report Generator/RRig Monthly/README.md b/Report Generator/RRig Monthly/README.md new file mode 100644 index 0000000..5ae680f --- /dev/null +++ b/Report Generator/RRig Monthly/README.md @@ -0,0 +1,130 @@ +# lambda-python3.12 + +This project contains source code and supporting files for a serverless application that you can deploy with the SAM CLI. It includes the following files and folders. + +- hello_world - Code for the application's Lambda function. +- events - Invocation events that you can use to invoke the function. +- tests - Unit tests for the application code. +- template.yaml - A template that defines the application's AWS resources. + +The application uses several AWS resources, including Lambda functions and an API Gateway API. These resources are defined in the `template.yaml` file in this project. You can update the template to add AWS resources through the same deployment process that updates your application code. + +If you prefer to use an integrated development environment (IDE) to build and test your application, you can use the AWS Toolkit. +The AWS Toolkit is an open source plug-in for popular IDEs that uses the SAM CLI to build and deploy serverless applications on AWS. The AWS Toolkit also adds a simplified step-through debugging experience for Lambda function code. See the following links to get started. + +* [CLion](https://docs.aws.amazon.com/toolkit-for-jetbrains/latest/userguide/welcome.html) +* [GoLand](https://docs.aws.amazon.com/toolkit-for-jetbrains/latest/userguide/welcome.html) +* [IntelliJ](https://docs.aws.amazon.com/toolkit-for-jetbrains/latest/userguide/welcome.html) +* [WebStorm](https://docs.aws.amazon.com/toolkit-for-jetbrains/latest/userguide/welcome.html) +* [Rider](https://docs.aws.amazon.com/toolkit-for-jetbrains/latest/userguide/welcome.html) +* [PhpStorm](https://docs.aws.amazon.com/toolkit-for-jetbrains/latest/userguide/welcome.html) +* [PyCharm](https://docs.aws.amazon.com/toolkit-for-jetbrains/latest/userguide/welcome.html) +* [RubyMine](https://docs.aws.amazon.com/toolkit-for-jetbrains/latest/userguide/welcome.html) +* [DataGrip](https://docs.aws.amazon.com/toolkit-for-jetbrains/latest/userguide/welcome.html) +* [VS Code](https://docs.aws.amazon.com/toolkit-for-vscode/latest/userguide/welcome.html) +* [Visual Studio](https://docs.aws.amazon.com/toolkit-for-visual-studio/latest/user-guide/welcome.html) + +## Deploy the sample application + +The Serverless Application Model Command Line Interface (SAM CLI) is an extension of the AWS CLI that adds functionality for building and testing Lambda applications. It uses Docker to run your functions in an Amazon Linux environment that matches Lambda. It can also emulate your application's build environment and API. + +To use the SAM CLI, you need the following tools. 
+ +* SAM CLI - [Install the SAM CLI](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/serverless-sam-cli-install.html) +* [Python 3 installed](https://www.python.org/downloads/) +* Docker - [Install Docker community edition](https://hub.docker.com/search/?type=edition&offering=community) + +To build and deploy your application for the first time, run the following in your shell: + +```bash +sam build --use-container +sam deploy --guided +``` + +The first command will build the source of your application. The second command will package and deploy your application to AWS, with a series of prompts: + +* **Stack Name**: The name of the stack to deploy to CloudFormation. This should be unique to your account and region, and a good starting point would be something matching your project name. +* **AWS Region**: The AWS region you want to deploy your app to. +* **Confirm changes before deploy**: If set to yes, any change sets will be shown to you before execution for manual review. If set to no, the AWS SAM CLI will automatically deploy application changes. +* **Allow SAM CLI IAM role creation**: Many AWS SAM templates, including this example, create AWS IAM roles required for the AWS Lambda function(s) included to access AWS services. By default, these are scoped down to minimum required permissions. To deploy an AWS CloudFormation stack which creates or modifies IAM roles, the `CAPABILITY_IAM` value for `capabilities` must be provided. If permission isn't provided through this prompt, to deploy this example you must explicitly pass `--capabilities CAPABILITY_IAM` to the `sam deploy` command. +* **Save arguments to samconfig.toml**: If set to yes, your choices will be saved to a configuration file inside the project, so that in the future you can just re-run `sam deploy` without parameters to deploy changes to your application. + +You can find your API Gateway Endpoint URL in the output values displayed after deployment. + +## Use the SAM CLI to build and test locally + +Build your application with the `sam build --use-container` command. + +```bash +lambda-python3.12$ sam build --use-container +``` + +The SAM CLI installs dependencies defined in `hello_world/requirements.txt`, creates a deployment package, and saves it in the `.aws-sam/build` folder. + +Test a single function by invoking it directly with a test event. An event is a JSON document that represents the input that the function receives from the event source. Test events are included in the `events` folder in this project. + +Run functions locally and invoke them with the `sam local invoke` command. + +```bash +lambda-python3.12$ sam local invoke HelloWorldFunction --event events/event.json +``` + +The SAM CLI can also emulate your application's API. Use the `sam local start-api` to run the API locally on port 3000. + +```bash +lambda-python3.12$ sam local start-api +lambda-python3.12$ curl http://localhost:3000/ +``` + +The SAM CLI reads the application template to determine the API's routes and the functions that they invoke. The `Events` property on each function's definition includes the route and method for each path. + +```yaml + Events: + HelloWorld: + Type: Api + Properties: + Path: /hello + Method: get +``` + +## Add a resource to your application +The application template uses AWS Serverless Application Model (AWS SAM) to define application resources. 
AWS SAM is an extension of AWS CloudFormation with a simpler syntax for configuring common serverless application resources such as functions, triggers, and APIs. For resources not included in [the SAM specification](https://github.com/awslabs/serverless-application-model/blob/master/versions/2016-10-31.md), you can use standard [AWS CloudFormation](https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-template-resource-type-ref.html) resource types. + +## Fetch, tail, and filter Lambda function logs + +To simplify troubleshooting, SAM CLI has a command called `sam logs`. `sam logs` lets you fetch logs generated by your deployed Lambda function from the command line. In addition to printing the logs on the terminal, this command has several nifty features to help you quickly find the bug. + +`NOTE`: This command works for all AWS Lambda functions; not just the ones you deploy using SAM. + +```bash +lambda-python3.12$ sam logs -n HelloWorldFunction --stack-name "lambda-python3.12" --tail +``` + +You can find more information and examples about filtering Lambda function logs in the [SAM CLI Documentation](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/serverless-sam-cli-logging.html). + +## Tests + +Tests are defined in the `tests` folder in this project. Use PIP to install the test dependencies and run tests. + +```bash +lambda-python3.12$ pip install -r tests/requirements.txt --user +# unit test +lambda-python3.12$ python -m pytest tests/unit -v +# integration test, requiring deploying the stack first. +# Create the env variable AWS_SAM_STACK_NAME with the name of the stack we are testing +lambda-python3.12$ AWS_SAM_STACK_NAME="lambda-python3.12" python -m pytest tests/integration -v +``` + +## Cleanup + +To delete the sample application that you created, use the AWS CLI. Assuming you used your project name for the stack name, you can run the following: + +```bash +sam delete --stack-name "lambda-python3.12" +``` + +## Resources + +See the [AWS SAM developer guide](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/what-is-sam.html) for an introduction to SAM specification, the SAM CLI, and serverless application concepts. 
+ +Next, you can use AWS Serverless Application Repository to deploy ready to use Apps that go beyond hello world samples and learn how authors developed their applications: [AWS Serverless Application Repository main page](https://aws.amazon.com/serverless/serverlessrepo/) diff --git a/Report Generator/RRig Monthly/__init__.py b/Report Generator/RRig Monthly/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/Report Generator/RRig Monthly/build-deploy.sh b/Report Generator/RRig Monthly/build-deploy.sh new file mode 100755 index 0000000..d93409a --- /dev/null +++ b/Report Generator/RRig Monthly/build-deploy.sh @@ -0,0 +1,2 @@ +DOCKER_HOST=unix:///Users/nico/.docker/run/docker.sock sam build --use-container +DOCKER_HOST=unix:///Users/nico/.docker/run/docker.sock sam deploy diff --git a/Report Generator/RRig Monthly/build-local-test.sh b/Report Generator/RRig Monthly/build-local-test.sh new file mode 100755 index 0000000..58d7c4f --- /dev/null +++ b/Report Generator/RRig Monthly/build-local-test.sh @@ -0,0 +1,2 @@ +DOCKER_HOST=unix:///Users/nico/.docker/run/docker.sock sam build --use-container +DOCKER_HOST=unix:///Users/nico/.docker/run/docker.sock sam local invoke diff --git a/Report Generator/RRig Monthly/events/event.json b/Report Generator/RRig Monthly/events/event.json new file mode 100644 index 0000000..a6197de --- /dev/null +++ b/Report Generator/RRig Monthly/events/event.json @@ -0,0 +1,62 @@ +{ + "body": "{\"message\": \"hello world\"}", + "resource": "/hello", + "path": "/hello", + "httpMethod": "GET", + "isBase64Encoded": false, + "queryStringParameters": { + "foo": "bar" + }, + "pathParameters": { + "proxy": "/path/to/resource" + }, + "stageVariables": { + "baz": "qux" + }, + "headers": { + "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8", + "Accept-Encoding": "gzip, deflate, sdch", + "Accept-Language": "en-US,en;q=0.8", + "Cache-Control": "max-age=0", + "CloudFront-Forwarded-Proto": "https", + "CloudFront-Is-Desktop-Viewer": "true", + "CloudFront-Is-Mobile-Viewer": "false", + "CloudFront-Is-SmartTV-Viewer": "false", + "CloudFront-Is-Tablet-Viewer": "false", + "CloudFront-Viewer-Country": "US", + "Host": "1234567890.execute-api.us-east-1.amazonaws.com", + "Upgrade-Insecure-Requests": "1", + "User-Agent": "Custom User Agent String", + "Via": "1.1 08f323deadbeefa7af34d5feb414ce27.cloudfront.net (CloudFront)", + "X-Amz-Cf-Id": "cDehVQoZnx43VYQb9j2-nvCh-9z396Uhbp027Y2JvkCPNLmGJHqlaA==", + "X-Forwarded-For": "127.0.0.1, 127.0.0.2", + "X-Forwarded-Port": "443", + "X-Forwarded-Proto": "https" + }, + "requestContext": { + "accountId": "123456789012", + "resourceId": "123456", + "stage": "prod", + "requestId": "c6af9ac6-7b61-11e6-9a41-93e8deadbeef", + "requestTime": "09/Apr/2015:12:34:56 +0000", + "requestTimeEpoch": 1428582896000, + "identity": { + "cognitoIdentityPoolId": null, + "accountId": null, + "cognitoIdentityId": null, + "caller": null, + "accessKey": null, + "sourceIp": "127.0.0.1", + "cognitoAuthenticationType": null, + "cognitoAuthenticationProvider": null, + "userArn": null, + "userAgent": "Custom User Agent String", + "user": null + }, + "path": "/prod/hello", + "resourcePath": "/hello", + "httpMethod": "POST", + "apiId": "1234567890", + "protocol": "HTTP/1.1" + } +} diff --git a/Report Generator/RRig Monthly/rrigreport/__init__.py b/Report Generator/RRig Monthly/rrigreport/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/Report Generator/RRig Monthly/rrigreport/app.py b/Report Generator/RRig 
Monthly/rrigreport/app.py new file mode 100644 index 0000000..0930620 --- /dev/null +++ b/Report Generator/RRig Monthly/rrigreport/app.py @@ -0,0 +1,42 @@ +import json + +# import requests + + +def lambda_handler(event, context): + """Sample pure Lambda function + + Parameters + ---------- + event: dict, required + API Gateway Lambda Proxy Input Format + + Event doc: https://docs.aws.amazon.com/apigateway/latest/developerguide/set-up-lambda-proxy-integrations.html#api-gateway-simple-proxy-for-lambda-input-format + + context: object, required + Lambda Context runtime methods and attributes + + Context doc: https://docs.aws.amazon.com/lambda/latest/dg/python-context-object.html + + Returns + ------ + API Gateway Lambda Proxy Output Format: dict + + Return doc: https://docs.aws.amazon.com/apigateway/latest/developerguide/set-up-lambda-proxy-integrations.html + """ + + # try: + # ip = requests.get("http://checkip.amazonaws.com/") + # except requests.RequestException as e: + # # Send some context about this error to Lambda Logs + # print(e) + + # raise e + + return { + "statusCode": 200, + "body": json.dumps({ + "message": "hello world", + # "location": ip.text.replace("\n", "") + }), + } diff --git a/Report Generator/RRig Monthly/rrigreport/config.json b/Report Generator/RRig Monthly/rrigreport/config.json new file mode 100644 index 0000000..9d3160e --- /dev/null +++ b/Report Generator/RRig Monthly/rrigreport/config.json @@ -0,0 +1,35 @@ +[ + { + "emails": [ + "nmelone@henry-pump.com" + ], + "customers": { + "4d4058a0-b25d-11ef-861c-8dbe77c636e1": { + "name": "RRig Energy", + "deviceTypes": [ + { + "deviceType": "rr_facility", + "dataPoints": [ + "tp_yesterday_total" + ], + "labels":{ + "tp_yesterday_total": "Today Total" + } + }, + { + "deviceType": "rr_pipeline", + "dataPoints": [ + "fm_1_yesterday_total" + ], + "labels":{ + "fm_1_yesterday_total": "Today Total" + } + } + ] + } + }, + "filterDevicesIn": [], + "filterDevicesOut": ["9fd75630-c924-11ef-8af2-917752fdfd64","55dd5960-c949-11ef-a206-4f9798eaa111", "21cebee0-bcb3-11ef-82e8-957b60666c9e"], + "name": "RRig-Energy-Monthly-Report" + } +] \ No newline at end of file diff --git a/Report Generator/RRig Monthly/rrigreport/handler.py b/Report Generator/RRig Monthly/rrigreport/handler.py new file mode 100644 index 0000000..2bb3a8d --- /dev/null +++ b/Report Generator/RRig Monthly/rrigreport/handler.py @@ -0,0 +1,6 @@ +import json +def handler(event, context): + # Log the event argument for debugging and for use in local development. 
+ print(json.dumps(event)) + + return {} \ No newline at end of file diff --git a/Report Generator/RRig Monthly/rrigreport/requirements.txt b/Report Generator/RRig Monthly/rrigreport/requirements.txt new file mode 100644 index 0000000..e69de29 diff --git a/Report Generator/RRig Monthly/rrigreport/rrigreport.py b/Report Generator/RRig Monthly/rrigreport/rrigreport.py new file mode 100644 index 0000000..2b7489a --- /dev/null +++ b/Report Generator/RRig Monthly/rrigreport/rrigreport.py @@ -0,0 +1,217 @@ +from tb_rest_client.rest_client_pe import * +from tb_rest_client.rest import ApiException +import json, xlsxwriter, boto3, os, time, pytz, sys +from threading import Lock +from datetime import datetime as dt +from datetime import timedelta as td +from email.mime.multipart import MIMEMultipart +from email.mime.text import MIMEText +from email import encoders +from email.mime.base import MIMEBase + + +# Define a rate limiter class +class RateLimiter: + def __init__(self, max_calls, period): + self.max_calls = max_calls + self.period = period + self.call_times = [] + self.lock = Lock() + + def acquire(self): + with self.lock: + current_time = time.time() + # Remove expired calls + self.call_times = [t for t in self.call_times if t > current_time - self.period] + if len(self.call_times) >= self.max_calls: + # Wait for the oldest call to expire + time_to_wait = self.period - (current_time - self.call_times[0]) + time.sleep(time_to_wait) + # Register the current call + self.call_times.append(time.time()) + +# Initialize a rate limiter +rate_limiter = RateLimiter(max_calls=10, period=1) # Adjust `max_calls` and `period` as needed + +def sort_dict_keys(d): + """Sorts the keys of all nested dictionaries in a given dictionary. + + Args: + d: The input dictionary. + + Returns: + A new dictionary with sorted keys at each level. 
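+
+ Example (illustrative):
+ >>> sort_dict_keys({"b": {"y": 2, "x": 1}, "a": 0})
+ {'a': 0, 'b': {'x': 1, 'y': 2}}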
+ """ + sorted_d = {} + for k, v in d.items(): + if isinstance(v, dict): + sorted_d[k] = sort_dict_keys(v) + else: + sorted_d[k] = v + return dict(sorted(sorted_d.items())) + + +def lambda_handler(event, context): + # Creating Rest Client for ThingsBoard + with RestClientPE(base_url="https://hp.henrypump.cloud") as rest_client: + try: + rest_client.login(username=os.environ["username"], password=os.environ["password"]) + # Loading Config from file + with open("./config.json") as f: + config = json.load(f) + reportData = {} + reportToList = {} + + # Loop through each item in config, each item represents a report + for report in config: + reportToList[report["name"]] = report["emails"] + for customer in report["customers"].keys(): + # Apply rate limiting for API calls + rate_limiter.acquire() + devices = rest_client.get_customer_devices(customer_id=customer, page=0, page_size=1000) + + if report["filterDevicesIn"]: + devices.data = [device for device in devices.data if device.id.id in report["filterDevicesIn"]] + if report["filterDevicesOut"]: + devices.data = [device for device in devices.data if device.id.id not in report["filterDevicesOut"]] + + if not reportData.get(report["name"], None): + reportData[report["name"]] = {} + + for device in devices.data: + for deviceType in report["customers"][customer]["deviceTypes"]: + if device.type == deviceType["deviceType"]: + rate_limiter.acquire() + keys = rest_client.get_timeseries_keys_v1(device.id) + keys = list(filter(lambda x: x in deviceType["dataPoints"], keys)) + #Check for report customer + if not reportData[report["name"]].get(report["customers"][customer]["name"], None): + reportData[report["name"]][report["customers"][customer]["name"]] = {} + #Check for device type in config + if device.type in list(map(lambda x: x["deviceType"], report["customers"][customer]["deviceTypes"])): + + if keys: + rate_limiter.acquire() + # Define the Chicago timezone + chicago_tz = pytz.timezone("America/Chicago") + + # Get today's date in Chicago timezone + today = dt.now(chicago_tz) + + # Get the first day of the current month + first_day_of_this_month = today.replace(day=1,hour=0,minute=9) + + # Calculate the first day of the previous month + last_day_of_last_month = first_day_of_this_month - td(days=1) + first_day_of_previous_month = last_day_of_last_month.replace(day=2) + + # Convert to timestamp in milliseconds + first_day_of_previous_month_timestamp_ms = int(first_day_of_previous_month.timestamp() * 1000) + first_day_of_this_month_timestamp_ms = int(first_day_of_this_month.timestamp() * 1000) + deviceData = rest_client.get_timeseries(entity_id=device.id, keys=",".join(keys),start_ts=first_day_of_previous_month_timestamp_ms, end_ts=first_day_of_this_month_timestamp_ms, agg=None, limit=5000, order_by="ASC") + + # Group data by day and keep the first point per day + filtered_data = {} + deviceDataFiltered = {} + for key, v in deviceData.items(): + for value in v: + # Convert timestamp to datetime in Chicago timezone + dttz = dt.fromtimestamp(value["ts"] / 1000, tz=chicago_tz) + + # Get the calendar day (date part) + day = dttz.date() + + # Keep only the first data point for each day + if day not in filtered_data: + filtered_data[day] = value + # Extract the filtered results + deviceDataFiltered[key] = list(filtered_data.values()) + for x in report["customers"][customer]["deviceTypes"]: + if x["deviceType"] == device.type: + labels = x["labels"] + labelled_data = {} + for k,v in labels.items(): + labelled_data[v] = {} + for k,v in 
deviceDataFiltered.items(): + labelled_data[labels[k]] = v + reportData[report["name"]][report["customers"][customer]["name"]][device.name] = labelled_data + else: + reportData[report["name"]][report["customers"][customer]["name"]][device.name] = {} + + #Sort Data + reportDataSorted = sort_dict_keys(reportData) + + #print(json.dumps(reportDataSorted,indent=4)) + except ApiException as e: + print(f"API Exception: {e}") + except Exception as e: + print(f"Other Exception in getting data:\n{e}") + print(type(e)) + + + # Create an AWS SES client + ses_client = boto3.client('ses', region_name='us-east-1') + s3 = boto3.resource('s3') + BUCKET_NAME = "thingsboard-email-reports" + # Create a workbook for each report + for report_name, report_data in reportDataSorted.items(): + #will generate an email lower down + spreadsheets = [] + # Create a worksheet for each company + for company_name, device_data in report_data.items(): + workbook = xlsxwriter.Workbook(f"/tmp/{report_name}-{company_name}-{dt.today().strftime('%Y-%m-%d')}.xlsx",{'strings_to_numbers': True}) + bold = workbook.add_format({'bold': True}) + # Create a sheet for each device type + #for device_type, device_data in company_data.items(): + worksheet = workbook.add_worksheet("Totals") + + # Set the header column with device types + device_names = list(device_data.keys()) + #worksheet.write_column(1, 0, device_names,bold) + worksheet.write_row(0, 1, device_names,bold) + # Write the data to the sheet + for i, (telemetry_name, telemetry_data) in enumerate(device_data.items()): + # Set the header row with telemetry names + telemetry_names = []#list(telemetry_data.keys()) + for _, e in telemetry_data.items(): + for entry in e: + telemetry_names.append(str(dt.fromtimestamp(entry["ts"] / 1000, tz=chicago_tz).date() - td(days=1))) + + #telemetry_names = [dt.fromtimestamp(entry["ts"] / 1000, tz=chicago_tz).date() for _, entry in telemetry_data.items()] + #worksheet.write_row(0, 1, telemetry_names, bold) + worksheet.write_column(1, 0, telemetry_names, bold) + for j, (data_name, data) in enumerate(telemetry_data.items()): + values = [d["value"] for d in data] + #worksheet.write_row(i + 1, j+ 1, values) + worksheet.write_column( j+ 1,i + 1, values) + worksheet.autofit() + workbook.close() + spreadsheets.append(workbook) + + # Store the generated report in S3. + s3.Object(BUCKET_NAME, f'{report_name}-{company_name}-{dt.today().strftime('%Y-%m-%d')}.xlsx').put(Body=open(f"/tmp/{report_name}-{dt.today().strftime('%Y-%m-%d')}.xlsx", 'rb')) + if reportToList[report_name]: + # Create an email message + msg = MIMEMultipart() + msg['Subject'] = report_name + msg['From'] = 'alerts@henry-pump.com' + msg['To'] = ", ".join(reportToList[report_name]) + + # Add a text body to the message (optional) + body_text = 'Please find the attached spreadsheets.' 
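+ # Note: SES SendEmail does not support file attachments, so the message is
+ # assembled as a raw MIME multipart payload and sent via send_raw_email below.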
+ msg.attach(MIMEText(body_text, 'plain')) + + # Attach each workbook in the spreadsheets array + for spreadsheet in spreadsheets: + # Attach the file to the email message + attachment = MIMEBase('application', 'octet-stream') + attachment.set_payload(open(spreadsheet.filename, "rb").read()) + encoders.encode_base64(attachment) + attachment.add_header('Content-Disposition', 'attachment', filename=spreadsheet.filename[5:]) + + msg.attach(attachment) + # Send the email using AWS SES + response = ses_client.send_raw_email( + + RawMessage={'Data': msg.as_string()} + ) \ No newline at end of file diff --git a/Report Generator/RRig Monthly/samconfig.toml b/Report Generator/RRig Monthly/samconfig.toml new file mode 100644 index 0000000..756f4ba --- /dev/null +++ b/Report Generator/RRig Monthly/samconfig.toml @@ -0,0 +1,36 @@ +# More information about the configuration file can be found here: +# https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/serverless-sam-cli-config.html +version = 0.1 + +[default] +[default.global.parameters] +stack_name = "lambda-python3.12" + +[default.build.parameters] +cached = true +parallel = true + +[default.validate.parameters] +lint = true + +[default.deploy.parameters] +capabilities = "CAPABILITY_IAM" +confirm_changeset = true +resolve_s3 = true +stack_name = "rrigreport" +s3_prefix = "tbreport" +region = "us-east-1" +image_repositories = [] +disable_rollback = true + +[default.package.parameters] +resolve_s3 = true + +[default.sync.parameters] +watch = true + +[default.local_start_api.parameters] +warm_containers = "EAGER" + +[default.local_start_lambda.parameters] +warm_containers = "EAGER" diff --git a/Report Generator/RRig Monthly/tbreportlayer/requirements.txt b/Report Generator/RRig Monthly/tbreportlayer/requirements.txt new file mode 100644 index 0000000..3c52dac --- /dev/null +++ b/Report Generator/RRig Monthly/tbreportlayer/requirements.txt @@ -0,0 +1,3 @@ +xlsxwriter +tb-rest-client==3.7.0 +pytz \ No newline at end of file diff --git a/Report Generator/RRig Monthly/template.yaml b/Report Generator/RRig Monthly/template.yaml new file mode 100644 index 0000000..fbbf3fd --- /dev/null +++ b/Report Generator/RRig Monthly/template.yaml @@ -0,0 +1,72 @@ +AWSTemplateFormatVersion: '2010-09-09' +Transform: AWS::Serverless-2016-10-31 +Description: | + RRig Monthly Totals Report + +# More info about Globals: https://github.com/awslabs/serverless-application-model/blob/master/docs/globals.rst +Globals: + Function: + Timeout: 3 + +Resources: + RRigReport: + Type: AWS::Serverless::Function # More info about Function Resource: https://github.com/awslabs/serverless-application-model/blob/master/versions/2016-10-31.md#awsserverlessfunction + Properties: + MemorySize: 128 + Timeout: 300 + Environment: + Variables: + username: henry.pump.automation@gmail.com + password: Henry Pump @ 2022 + RRIGREPORTBUCKET_BUCKET_NAME: !ImportValue TBReportBucket + Architectures: + - arm64 + CodeUri: rrigreport + Runtime: python3.12 + Handler: rrigreport.lambda_handler + Policies: + - AmazonSESFullAccess + - Statement: + - Effect: Allow + Action: + - s3:PutObject + Resource: + - !Sub arn:${AWS::Partition}:s3:::!ImportValue TBReportBucket + - !Sub arn:${AWS::Partition}:s3:::!ImportValue TBReportBucket/* + Layers: + - !ImportValue TBReportLayer + + RRigSchedule: + Type: AWS::Scheduler::Schedule + Properties: + ScheduleExpression: cron(5 7 1 * ? 
*) + FlexibleTimeWindow: + Mode: 'OFF' + ScheduleExpressionTimezone: America/Chicago + Target: + Arn: !GetAtt RRigReport.Arn + RoleArn: !GetAtt RRigScheduleToRRigReportRole.Arn + RRigScheduleToRRigReportRole: + Type: AWS::IAM::Role + Properties: + AssumeRolePolicyDocument: + Version: '2012-10-17' + Statement: + Effect: Allow + Principal: + Service: !Sub scheduler.${AWS::URLSuffix} + Action: sts:AssumeRole + Policies: + - PolicyName: StartExecutionPolicy + PolicyDocument: + Version: '2012-10-17' + Statement: + - Effect: Allow + Action: + - lambda:InvokeFunction + - s3:GetObject + - s3:PutObject + Resource: + - !GetAtt RRigReport.Arn + - !Sub arn:${AWS::Partition}:s3:::!ImportValue TBReportBucket + - !Sub arn:${AWS::Partition}:s3:::!ImportValue TBReportBucket/* \ No newline at end of file diff --git a/Report Generator/RRig Monthly/tests/__init__.py b/Report Generator/RRig Monthly/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/Report Generator/RRig Monthly/tests/integration/__init__.py b/Report Generator/RRig Monthly/tests/integration/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/Report Generator/RRig Monthly/tests/integration/test_api_gateway.py b/Report Generator/RRig Monthly/tests/integration/test_api_gateway.py new file mode 100644 index 0000000..b96e803 --- /dev/null +++ b/Report Generator/RRig Monthly/tests/integration/test_api_gateway.py @@ -0,0 +1,45 @@ +import os + +import boto3 +import pytest +import requests + +""" +Make sure env variable AWS_SAM_STACK_NAME exists with the name of the stack we are going to test. +""" + + +class TestApiGateway: + + @pytest.fixture() + def api_gateway_url(self): + """ Get the API Gateway URL from Cloudformation Stack outputs """ + stack_name = os.environ.get("AWS_SAM_STACK_NAME") + + if stack_name is None: + raise ValueError('Please set the AWS_SAM_STACK_NAME environment variable to the name of your stack') + + client = boto3.client("cloudformation") + + try: + response = client.describe_stacks(StackName=stack_name) + except Exception as e: + raise Exception( + f"Cannot find stack {stack_name} \n" f'Please make sure a stack with the name "{stack_name}" exists' + ) from e + + stacks = response["Stacks"] + stack_outputs = stacks[0]["Outputs"] + api_outputs = [output for output in stack_outputs if output["OutputKey"] == "HelloWorldApi"] + + if not api_outputs: + raise KeyError(f"HelloWorldAPI not found in stack {stack_name}") + + return api_outputs[0]["OutputValue"] # Extract url from stack outputs + + def test_api_gateway(self, api_gateway_url): + """ Call the API Gateway endpoint and check the response """ + response = requests.get(api_gateway_url) + + assert response.status_code == 200 + assert response.json() == {"message": "hello world"} diff --git a/Report Generator/RRig Monthly/tests/requirements.txt b/Report Generator/RRig Monthly/tests/requirements.txt new file mode 100644 index 0000000..b9cf27a --- /dev/null +++ b/Report Generator/RRig Monthly/tests/requirements.txt @@ -0,0 +1,3 @@ +pytest +boto3 +requests diff --git a/Report Generator/RRig Monthly/tests/unit/__init__.py b/Report Generator/RRig Monthly/tests/unit/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/Report Generator/RRig Monthly/tests/unit/test_handler.py b/Report Generator/RRig Monthly/tests/unit/test_handler.py new file mode 100644 index 0000000..d98ce57 --- /dev/null +++ b/Report Generator/RRig Monthly/tests/unit/test_handler.py @@ -0,0 +1,72 @@ +import json + +import pytest + +from hello_world import app + + 
+@pytest.fixture()
+def apigw_event():
+    """ Generates API GW Event"""
+
+    return {
+        "body": '{ "test": "body"}',
+        "resource": "/{proxy+}",
+        "requestContext": {
+            "resourceId": "123456",
+            "apiId": "1234567890",
+            "resourcePath": "/{proxy+}",
+            "httpMethod": "POST",
+            "requestId": "c6af9ac6-7b61-11e6-9a41-93e8deadbeef",
+            "accountId": "123456789012",
+            "identity": {
+                "apiKey": "",
+                "userArn": "",
+                "cognitoAuthenticationType": "",
+                "caller": "",
+                "userAgent": "Custom User Agent String",
+                "user": "",
+                "cognitoIdentityPoolId": "",
+                "cognitoIdentityId": "",
+                "cognitoAuthenticationProvider": "",
+                "sourceIp": "127.0.0.1",
+                "accountId": "",
+            },
+            "stage": "prod",
+        },
+        "queryStringParameters": {"foo": "bar"},
+        "headers": {
+            "Via": "1.1 08f323deadbeefa7af34d5feb414ce27.cloudfront.net (CloudFront)",
+            "Accept-Language": "en-US,en;q=0.8",
+            "CloudFront-Is-Desktop-Viewer": "true",
+            "CloudFront-Is-SmartTV-Viewer": "false",
+            "CloudFront-Is-Mobile-Viewer": "false",
+            "X-Forwarded-For": "127.0.0.1, 127.0.0.2",
+            "CloudFront-Viewer-Country": "US",
+            "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8",
+            "Upgrade-Insecure-Requests": "1",
+            "X-Forwarded-Port": "443",
+            "Host": "1234567890.execute-api.us-east-1.amazonaws.com",
+            "X-Forwarded-Proto": "https",
+            "X-Amz-Cf-Id": "aaaaaaaaaae3VYQb9jd-nvCd-de396Uhbp027Y2JvkCPNLmGJHqlaA==",
+            "CloudFront-Is-Tablet-Viewer": "false",
+            "Cache-Control": "max-age=0",
+            "User-Agent": "Custom User Agent String",
+            "CloudFront-Forwarded-Proto": "https",
+            "Accept-Encoding": "gzip, deflate, sdch",
+        },
+        "pathParameters": {"proxy": "/examplepath"},
+        "httpMethod": "POST",
+        "stageVariables": {"baz": "qux"},
+        "path": "/examplepath",
+    }
+
+
+def test_lambda_handler(apigw_event):
+
+    ret = app.lambda_handler(apigw_event, "")
+    data = json.loads(ret["body"])
+
+    assert ret["statusCode"] == 200
+    assert "message" in ret["body"]
+    assert data["message"] == "hello world"
diff --git a/Report Generator/lambda-python3.12/tbreport/config.json b/Report Generator/lambda-python3.12/tbreport/config.json
index de8d11f..8867916 100644
--- a/Report Generator/lambda-python3.12/tbreport/config.json
+++ b/Report Generator/lambda-python3.12/tbreport/config.json
@@ -1,7 +1,7 @@
 [
     {
         "emails": [
-            "nmelone@henry-pump.com"
+
         ],
         "customers": {
             "ec691940-52e2-11ec-a919-556e8dbef35c": {
diff --git a/Report Generator/lambda-python3.12/tbreport/tbreport.py b/Report Generator/lambda-python3.12/tbreport/tbreport.py
index b9c63f8..9851af1 100644
--- a/Report Generator/lambda-python3.12/tbreport/tbreport.py
+++ b/Report Generator/lambda-python3.12/tbreport/tbreport.py
@@ -146,29 +146,30 @@ def lambda_handler(event, context):
         # Store the generated report in S3.
         s3.Object(BUCKET_NAME, f'{report_name}-{company_name}-{dt.today().strftime('%Y-%m-%d')}.xlsx').put(Body=open(f"/tmp/{report_name}-{company_name}-{dt.today().strftime('%Y-%m-%d')}.xlsx", 'rb'))
 
-        # Create an email message
-        msg = MIMEMultipart()
-        msg['Subject'] = report_name
-        msg['From'] = 'alerts@henry-pump.com'
-        msg['To'] = ", ".join(reportToList[report_name])
+        if reportToList[report_name]:
+            # Create an email message
+            msg = MIMEMultipart()
+            msg['Subject'] = report_name
+            msg['From'] = 'alerts@henry-pump.com'
+            msg['To'] = ", ".join(reportToList[report_name])
 
-        # Add a text body to the message (optional)
-        body_text = 'Please find the attached spreadsheets.'
-        msg.attach(MIMEText(body_text, 'plain'))
+            # Add a text body to the message (optional)
+            body_text = 'Please find the attached spreadsheets.'
+            msg.attach(MIMEText(body_text, 'plain'))
 
-        # Attach each workbook in the spreadsheets array
-        for spreadsheet in spreadsheets:
-            # Attach the file to the email message
-            attachment = MIMEBase('application', 'octet-stream')
-            attachment.set_payload(open(spreadsheet.filename, "rb").read())
-            encoders.encode_base64(attachment)
-            attachment.add_header('Content-Disposition', 'attachment', filename=spreadsheet.filename[5:])
+            # Attach each workbook in the spreadsheets array
+            for spreadsheet in spreadsheets:
+                # Attach the file to the email message
+                attachment = MIMEBase('application', 'octet-stream')
+                attachment.set_payload(open(spreadsheet.filename, "rb").read())
+                encoders.encode_base64(attachment)
+                attachment.add_header('Content-Disposition', 'attachment', filename=spreadsheet.filename[5:])
 
-            msg.attach(attachment)
-        # Send the email using AWS SES
-        response = ses_client.send_raw_email(
-
-            RawMessage={'Data': msg.as_string()}
-        )
+                msg.attach(attachment)
+            # Send the email using AWS SES
+            response = ses_client.send_raw_email(
+
+                RawMessage={'Data': msg.as_string()}
+            )
diff --git a/Report Generator/lambda-python3.12/tbreportlayer/requirements.txt b/Report Generator/lambda-python3.12/tbreportlayer/requirements.txt
index 95247e1..3c52dac 100644
--- a/Report Generator/lambda-python3.12/tbreportlayer/requirements.txt
+++ b/Report Generator/lambda-python3.12/tbreportlayer/requirements.txt
@@ -1,2 +1,3 @@
 xlsxwriter
-tb-rest-client
\ No newline at end of file
+tb-rest-client==3.7.0
+pytz
\ No newline at end of file
diff --git a/Report Generator/lambda-python3.12/template.yaml b/Report Generator/lambda-python3.12/template.yaml
index e7bbdcd..57842b2 100644
--- a/Report Generator/lambda-python3.12/template.yaml
+++ b/Report Generator/lambda-python3.12/template.yaml
@@ -104,4 +104,14 @@ Resources:
              - !Sub ${TBReportBucket.Arn}/*
            Condition:
              Bool:
-               aws:SecureTransport: 'false'
\ No newline at end of file
+               aws:SecureTransport: 'false'
+Outputs:
+  TBReportLayerExport:
+    Value: !Ref TBReportLayer
+    Export:
+      Name: TBReportLayer
+
+  TBReportBucketExport:
+    Value: !Ref TBReportBucket
+    Export:
+      Name: TBReportBucket
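Editor's note on the scheduler wiring in the RRig Monthly template: the EventBridge Scheduler target invokes RRigReport through the dedicated RRigScheduleToRRigReportRole. A minimal sketch of how the deployed wiring could be spot-checked after a sam deploy follows; the schedule name RRigMonthlySchedule is an assumption (the real name is defined elsewhere in the template and is not shown in this diff), and this script is not part of the repository.

import json

import boto3

# Hypothetical schedule name -- the actual name comes from the SAM template and is not visible here.
SCHEDULE_NAME = "RRigMonthlySchedule"

scheduler = boto3.client("scheduler")
lambda_client = boto3.client("lambda")

# Confirm the schedule targets the report function through the dedicated role.
schedule = scheduler.get_schedule(Name=SCHEDULE_NAME)
print(schedule["ScheduleExpression"], schedule["ScheduleExpressionTimezone"])
print(schedule["Target"]["Arn"])
print(schedule["Target"]["RoleArn"])

# Kick off one out-of-band run the same way the scheduler does (asynchronously).
response = lambda_client.invoke(
    FunctionName=schedule["Target"]["Arn"],
    InvocationType="Event",
    Payload=json.dumps({}).encode("utf-8"),
)
print(response["StatusCode"])  # 202 means the asynchronous invoke was accepted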
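Editor's note on the tbreport.py hunk: the change guards the whole e-mail path behind `if reportToList[report_name]:`, so a report with no configured recipients (as config.json now has) is still written to S3 but no SES call is made. A self-contained sketch of that send path, under the assumption that attachment paths live under /tmp/ as elsewhere in tbreport.py; the function name and its arguments are illustrative, not the production code.

import boto3
from email import encoders
from email.mime.base import MIMEBase
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText

ses_client = boto3.client("ses")


def send_report(report_name, attachment_paths, recipients):
    """Send the generated workbooks via SES raw e-mail, but only if recipients are configured."""
    if not recipients:  # mirrors the new `if reportToList[report_name]:` guard
        return None

    msg = MIMEMultipart()
    msg["Subject"] = report_name
    msg["From"] = "alerts@henry-pump.com"
    msg["To"] = ", ".join(recipients)
    msg.attach(MIMEText("Please find the attached spreadsheets.", "plain"))

    for path in attachment_paths:
        part = MIMEBase("application", "octet-stream")
        with open(path, "rb") as f:  # close the file handle explicitly
            part.set_payload(f.read())
        encoders.encode_base64(part)
        # Strip the assumed "/tmp/" prefix so the attachment carries only the workbook file name.
        part.add_header("Content-Disposition", "attachment", filename=path[5:])
        msg.attach(part)

    return ses_client.send_raw_email(
        Source=msg["From"],
        Destinations=recipients,
        RawMessage={"Data": msg.as_string()},
    )

Checking the recipient list before building the message avoids handing SES a message with no destinations, which would fail at send time.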
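Editor's note on the new Outputs block: exporting TBReportLayer and TBReportBucket lets other stacks, such as the RRig Monthly template that imports TBReportBucket for its scheduler policy, resolve them with Fn::ImportValue. As a small sketch (not part of the repository), the exports can be verified from Python before deploying a dependent stack:

import boto3


def get_export(name):
    """Return the value of a CloudFormation export, or None if it does not exist."""
    cfn = boto3.client("cloudformation")
    token = None
    while True:
        kwargs = {"NextToken": token} if token else {}
        page = cfn.list_exports(**kwargs)
        for export in page.get("Exports", []):
            if export["Name"] == name:
                return export["Value"]
        token = page.get("NextToken")
        if not token:
            return None


# The RRig Monthly stack imports this export to build the bucket ARNs in its scheduler policy.
bucket_name = get_export("TBReportBucket")
layer_version_arn = get_export("TBReportLayer")
print(bucket_name, layer_version_arn)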