I want to create a Lambda function that uploads a CSV file to an S3 bucket. To test it, I'm trying to configure a test event (using the default s3-put template, changing only my bucket name and the new file name).
test_event
{
"Records": [
{
"eventVersion": "2.0",
"eventSource": "aws:s3",
"awsRegion": "eu-central-1",
"eventTime": "1970-01-01T00:00:00.000Z",
"eventName": "ObjectCreated:Put",
"userIdentity": {
"principalId": "EXAMPLE"
},
"requestParameters": {
"sourceIPAddress": "127.0.0.1"
},
"responseElements": {
"x-amz-request-id": "EXAMPLE123456789",
"x-amz-id-2": "EXAMPLE123/5678abcder455relambdaisawesome/mnopqrdtygfwxyzABCDEFGH"
},
"s3": {
"s3SchemaVersion": "1.0",
"configurationId": "testConfigRule",
"bucket": {
"name": "MY_BUCKET",
"ownerIdentity": {
"principalId": "EXAMPLE"
},
"arn": "arn:aws:s3:::MY_BUCKET"
},
"object": {
"key": "MY_NEY_FILE_NAME",
"size": 1024,
"eTag": "01234567847647383cdef0123456789abcdef",
"sequencer": "0A1B2Cghfdjd3D4E5F678901"
}
}
}
]
}
lambda_function.py
import logging
import os
import boto3
from botocore.exceptions import ClientError

SECRET_KEY = "XXXX"
ACCESS_KEY = "XXXX"
processed_folder = "drivers"

def lambda_handler(event, context):
    """Upload a file to an S3 bucket."""
    for record in event['Records']:
        # Pull the bucket name and object key out of the event record.
        bucket = record['s3']['bucket']['name']
        key = record['s3']['object']['key']
        file_name = ????
        s3_client = boto3.client('s3', aws_access_key_id=ACCESS_KEY, aws_secret_access_key=SECRET_KEY)
        try:
            response = s3_client.upload_file(file_name, bucket, key)
        except ClientError as e:
            logging.error(e)
            return False
    return True
And, for obvious reasons, I'm getting the error "name 'file_name' is not defined".
How do I add the file to the test event configuration?
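For reference, here is a minimal sketch of one way the gap could be filled, assuming the CSV already exists locally in the Lambda's /tmp directory and that the test event is extended with a custom fileName field. Both the field name fileName and the path /tmp/drivers.csv are my own placeholders, not part of the s3-put template:

```python
import logging
import boto3
from botocore.exceptions import ClientError

def lambda_handler(event, context):
    """Sketch: upload a local CSV to the bucket/key named in the test event.

    Assumes the test event carries a custom "fileName" field pointing at a
    file that already exists in the Lambda's /tmp directory (placeholder).
    """
    for record in event['Records']:
        bucket = record['s3']['bucket']['name']
        key = record['s3']['object']['key']
        # Hypothetical: local path supplied via a custom field in the test event.
        file_name = record.get('fileName', '/tmp/drivers.csv')
        s3_client = boto3.client('s3')  # credentials come from the Lambda execution role
        try:
            s3_client.upload_file(file_name, bucket, key)
        except ClientError as e:
            logging.error(e)
            return False
    return True
```

Note that the s3:ObjectCreated:Put event is normally delivered to a function *after* an object has already landed in the bucket, so it carries only the bucket name and key, never the file contents or a local path; if the goal is to react to uploads, the handler would typically read the object via that key rather than upload anything itself.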