-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy paths3_event_lambda.py
More file actions
85 lines (58 loc) · 2.96 KB
/
s3_event_lambda.py
File metadata and controls
85 lines (58 loc) · 2.96 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
# description: This Lambda function emails you whenever a new object lands in the S3 source bucket. It then scans the source bucket for objects whose key matches a given prefix and whose timestamp is older than a cutoff; matching objects are copied to a target S3 bucket and deleted from the source bucket.
# create a lambda function and insert code here:
import boto3
import json
from datetime import datetime, timedelta
import urllib.parse
def lambda_handler(event, context):
    """Email a notification for each S3 event record, then archive old objects.

    For every record in the incoming S3 event payload, sends an SES email
    describing the event (event name, bucket, object key). Afterwards, lists
    SOURCE_BUCKET and, for each object whose key starts with the configured
    prefix and whose LastModified is at least 20 days old, copies it to
    DESTINATION_BUCKET and deletes the original.

    Parameters:
        event: S3 event notification payload; records are read from
            ``event["Records"]``.
        context: Lambda runtime context (unused).

    Returns:
        dict with ``statusCode`` 200 and a JSON-encoded body.
    """
    # Create the SES client once, not once per record.
    ses_client = boto3.client("ses")

    for record in event["Records"]:
        action = record["eventName"]
        bucket_name = record["s3"]["bucket"]["name"]
        # S3 event keys are URL-encoded (e.g. spaces arrive as '+');
        # decode before using the key anywhere.
        key = urllib.parse.unquote_plus(record["s3"]["object"]["key"])

        subject = str(action) + ' Event from ' + bucket_name
        body = """
<br>
This email is to notify you regarding {} event.
<br>
The object name is {}.
""".format(action, key)
        message = {
            "Subject": {"Data": subject},
            "Body": {"Html": {"Data": body}},
        }
        ses_client.send_email(
            Source="xxx@domain.com",
            Destination={"ToAddresses": ["xx@domain.com", "xxx@domain.com"]},
            Message=message,
        )
        print('Email has been sent')

    # --- Copy-and-delete (archival) task ---
    # Placeholder values: fill in before deploying.
    SOURCE_BUCKET = '<source bucket name>'
    DESTINATION_BUCKET = '<target bucket name>'
    KEY_PREFIX = '<give a key prefix name>'

    s3_client = boto3.client('s3')

    # Compute the age cutoff once, outside the listing loop. It must be
    # timezone-aware because S3's LastModified values are timezone-aware.
    cutoff = datetime.now().astimezone() - timedelta(days=20)

    # Paginate so buckets with more than 1000 objects are fully scanned.
    paginator = s3_client.get_paginator('list_objects_v2')
    for page in paginator.paginate(Bucket=SOURCE_BUCKET):
        if "Contents" not in page:
            # An empty bucket/prefix yields a page without a Contents key.
            print('No contents key for page!!!')
            continue
        for obj in page['Contents']:
            # Archive only objects under the prefix that are older than the cutoff.
            if obj['Key'].startswith(KEY_PREFIX) and obj['LastModified'] <= cutoff:
                key_name = obj['Key']
                print(key_name, 'was found')
                print(key_name, '=>', obj['LastModified'])
                # Copy the object to the destination bucket.
                print('Start to copy', key_name, 'to', DESTINATION_BUCKET)
                s3_client.copy_object(
                    Bucket=DESTINATION_BUCKET,
                    Key=key_name,
                    CopySource={'Bucket': SOURCE_BUCKET, 'Key': key_name},
                )
                print(key_name, 'has been copied to', DESTINATION_BUCKET)
                # Delete the original only after the copy call has succeeded.
                s3_client.delete_object(Bucket=SOURCE_BUCKET, Key=key_name)
                print(key_name, 'has been deleted from', SOURCE_BUCKET)

    return {
        'statusCode': 200,
        'body': json.dumps('Hello from Lambda!')
    }