Files
channel-combiner/lambda_function.py
Nico Melone 0c7d232b77 initial
2021-04-28 22:38:30 -04:00

64 lines
2.2 KiB
Python

import pandas as pd
import os
import boto3
import logging
import json
# Shared S3 client, reused across warm Lambda invocations.
s3 = boto3.client('s3')
# Bucket holding both the per-channel input CSVs and the merged output.
BUCKET_NAME = "channel-combiner"
# Root logger at INFO so event payloads and error details reach CloudWatch.
logger = logging.getLogger()
logger.setLevel(logging.INFO)
def lambda_handler(event, context):
    """Merge several single-channel CSV files from S3 into one resampled CSV.

    Expected query-string parameters:
      - ``filenames``: comma-separated S3 object keys of the input CSVs
      - ``resample``:  pandas offset alias (e.g. ``"1T"``) used to resample

    Downloads each input file to /tmp, resamples each series with a
    forward-fill, concatenates them column-wise, uploads the result to the
    bucket as a public ``output.csv``, then deletes the source objects.

    Returns an API-Gateway-style response dict with statusCode 200 on
    success or 500 on any S3 download/upload/delete failure.
    """
    logger.info(event)
    # str.split already returns a list; no extra list() wrapper needed.
    keys = event['queryStringParameters']['filenames'].split(",")
    resample_rate = event['queryStringParameters']['resample']

    objects = []
    try:
        for key in keys:
            # basename() flattens keys that contain "/" so the local path
            # under /tmp is always valid.
            s3.download_file(BUCKET_NAME, key, '/tmp/{}'.format(os.path.basename(key)))
            objects.append({'Key': key})
    except Exception as e:
        logger.error("Something went wrong in S3 download: {}".format(e))
        return {
            'statusCode': 500,
            'headers': {'Access-Control-Allow-Origin': '*'},
            'body': json.dumps("Something went wrong in S3 download")
        }

    # Each CSV becomes a one-column frame indexed by Date; the column is the
    # key with its last 26 characters stripped.
    # NOTE(review): the 26-char suffix length is an assumed naming convention
    # of the uploaded files — confirm against the producer.
    frames = [
        pd.read_csv('/tmp/{}'.format(os.path.basename(k)),
                    names=['Date', k[:-26]], header=0, index_col="Date",
                    parse_dates=True, low_memory=False)
        for k in keys
    ]
    # .ffill() is the supported spelling of the deprecated .pad()
    # (removed in pandas 2.x); behavior is identical: forward-fill the
    # gaps introduced by resampling.
    frames = [f.resample(resample_rate).ffill() for f in frames]
    df_all = pd.concat(frames, axis=1)
    df_all.to_csv("/tmp/output.csv")

    try:
        s3.upload_file("/tmp/output.csv", BUCKET_NAME, "output.csv")
        # Make the merged file publicly readable so it can be fetched by URL.
        # NOTE(review): public-read ACL exposes the object to anyone with the
        # link — confirm this is intended.
        s3_resource = boto3.resource('s3')
        object_acl = s3_resource.ObjectAcl(BUCKET_NAME, 'output.csv')
        object_acl.put(ACL='public-read')
    except Exception as e:
        logger.error("Something went wrong in S3 upload: {} ".format(e))
        return {
            'statusCode': 500,
            'headers': {'Access-Control-Allow-Origin': '*'},
            'body': json.dumps("Something went wrong in S3 upload")
        }

    # Clean up the source objects now that the combined file is uploaded.
    delete_request = {'Objects': objects, 'Quiet': True}
    logger.info(delete_request)
    try:
        s3.delete_objects(Bucket=BUCKET_NAME, Delete=delete_request)
    except Exception as e:
        logger.error("Something went wrong in S3 delete: {} ".format(e))
        return {
            'statusCode': 500,
            'headers': {'Access-Control-Allow-Origin': '*'},
            'body': json.dumps("Something went wrong in S3 delete")
        }

    return {
        'statusCode': 200,
        'headers': {'Access-Control-Allow-Origin': '*'},
        'body': json.dumps("Processing Complete!")
    }