-
Notifications
You must be signed in to change notification settings - Fork 4
Expand file tree
/
Copy pathtest_script.py
More file actions
105 lines (91 loc) · 3.19 KB
/
test_script.py
File metadata and controls
105 lines (91 loc) · 3.19 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
#!/usr/bin/env python3
import unittest
import os
import datetime
import boto3
import script
from moto import mock_s3
from freezegun import freeze_time
# variables used for the cleanup testing
# Prefix structure:
# s3://my-mock-bucket/mock-root-prefix/mock-sub-prefix/
mock_BUCKET = "my-mock-bucket"  # name of the moto-mocked S3 bucket
mock_REGION = "eu-west-1"  # region used for both client and bucket location
mock_ROOT_PREFIX = "mock-root-prefix"  # passed to script.cleanup as main_folder
mock_SUB_PREFIX = "mock-sub-prefix"  # passed to script.cleanup as sub_folder
# Trailing "" makes os.path.join append a final "/" so this is a key prefix.
mock_prefix = os.path.join(mock_ROOT_PREFIX, mock_SUB_PREFIX, "")
# Six objects are created, one per day going backwards from today (see setUp).
mock_FILENAMES = [
"test_object_01",
"test_object_02",
"test_object_03",
"test_object_04",
"test_object_05",
"test_object_06",
]
# Retention window (days) handed to script.cleanup as keep=.
days_to_keep = 4
# the cleanup test starts here
# we use moto to mock access to S3 (connection, object creation,
# the whole thing...)
@mock_s3
class test_cleanup_class(unittest.TestCase):
    """Exercise script.cleanup() against a moto-mocked S3 bucket.

    moto intercepts every boto3 S3 call made inside this class, so the
    tests run entirely offline with fake credentials.
    """

    def setUp(self):
        """Create the mock bucket and seed it with dated objects.

        Each entry of mock_FILENAMES is written with a LastModified
        timestamp stepping back one day per object (today, today-1, ...),
        plus one far-in-the-past control object from 2015.
        """
        client = boto3.client(
            "s3",
            region_name=mock_REGION,
            aws_access_key_id="fake_access_key",
            aws_secret_access_key="fake_secret_key",
        )
        # create the mock bucket resource
        client.create_bucket(
            Bucket=mock_BUCKET,
            CreateBucketConfiguration={"LocationConstraint": mock_REGION},
        )
        # Populate the bucket with mock data; freeze_time forces the
        # creation / last-modified attribute of each object.
        # microsecond=0 added so the reference point is an exact midnight
        # (strftime below truncates anyway, but be explicit).
        _today = datetime.datetime.now().replace(
            hour=0, minute=0, second=0, microsecond=0
        )
        for count, filename in enumerate(mock_FILENAMES):
            _key = os.path.join(mock_prefix, filename)
            _timestamp = _today - datetime.timedelta(days=count)
            with freeze_time(_timestamp.strftime("%Y-%m-%d %H:%M:%S")):
                client.put_object(Bucket=mock_BUCKET, Key=_key)
            # Read back the object's metadata and show its LastModified
            # attribute, so a failing run is easy to diagnose.
            _object = client.head_object(Bucket=mock_BUCKET, Key=_key)
            print(_key, _object["LastModified"])
        # Control object: old enough that it must never count as recent.
        with freeze_time("2015-01-01 00:00:00"):
            client.put_object(
                Bucket=mock_BUCKET,
                Key=mock_prefix + "anotherfile",
            )
        # gather information from the bucket for the test methods
        self.client = client
        self.buckets = client.list_buckets()
        self.objects = client.list_objects(Bucket=mock_BUCKET, Prefix=mock_prefix)

    def tearDown(self):
        """Empty and delete the mock bucket so each test starts clean."""
        s3 = boto3.resource(
            "s3",
            # use the shared constant (was a hard-coded "eu-west-1",
            # which would silently diverge if mock_REGION changed)
            region_name=mock_REGION,
            aws_access_key_id="fake_access_key",
            aws_secret_access_key="fake_secret_key",
        )
        bucket = s3.Bucket(mock_BUCKET)
        # an S3 bucket must be empty before it can be deleted
        for obj in bucket.objects.all():
            obj.delete()
        bucket.delete()

    def test_cleanup(self):
        """With keep=4 days, cleanup() should see 3 recent files and
        mark 0 for deletion on the fixture data built in setUp()."""
        total_files, to_delete = script.cleanup(
            self.client,
            bucket=mock_BUCKET,
            main_folder=mock_ROOT_PREFIX,
            sub_folder=mock_SUB_PREFIX,
            keep=days_to_keep,
        )
        self.assertEqual(total_files, 3)
        self.assertEqual(to_delete, 0)
if __name__ == "__main__":
unittest.main()