#!/usr/bin/env python
"""
AWS Lambda function that queries the refuse-collection schedule for an
address (via mkerefuse), stores the result in S3, and publishes an SNS
notification whenever the pickup dates change.
"""
from __future__ import print_function

import json
import os

import boto3

from mkerefuse.refuse import RefuseQuery
from mkerefuse.refuse import RefuseQueryAddress
from mkerefuse.util import json_serial
from mkerefuse.util import pickup_to_ics

DEFAULT_SNS_TOPIC = 'mke-trash-pickup'
"""Default SNS topic to notify of pickup changes"""

DEFAULT_S3_BUCKET = 'mke-trash-pickup'
"""Default S3 bucket name for storing persistent data"""

DEFAULT_S3_PREFIX = ''
"""Default S3 key prefix for persistent data"""

DEFAULT_S3_KEY = 'mke-trash-pickup.json'
"""Default S3 key for persistent data"""

HASH_SALT = '9B6E3FFC10EBB3001F1A586257C0E886'
"""Salt used when hashing the address"""

NOTIFY_DATE_FORMAT = '%A %Y-%m-%d'
"""strftime format for displaying collection dates"""


def get_sns_topic_arn(topic_name, aws_region=None, aws_account_num=None):
    """
    Builds the ARN for an SNS topic, defaulting to the current session's
    region and the caller's AWS account number.
    """
    if aws_region is None:
        aws_region = boto3.session.Session().region_name
    if aws_account_num is None:
        aws_account_num = boto3.client('sts').get_caller_identity()['Account']
    return ":".join([
        "arn",
        "aws",
        "sns",
        aws_region,
        aws_account_num,
        topic_name])
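
# For illustration only (hypothetical values): a topic named
# 'mke-trash-pickup' in region 'us-east-1' under account '123456789012'
# resolves to 'arn:aws:sns:us-east-1:123456789012:mke-trash-pickup'.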


def notify_pickup_change(pickup, sns_topic):
    """
    Publishes a notification for a garbage pickup change to an SNS topic
    """
    msg_parts = [
        "Garbage: " + pickup.next_pickup_garbage.strftime(NOTIFY_DATE_FORMAT),
    ]
    if pickup.next_pickup_recycle:
        # Summer-time notification: an exact recycle date is available
        msg_parts += [
            "Recycle: " + pickup.next_pickup_recycle.strftime(NOTIFY_DATE_FORMAT),
        ]
    else:
        # Winter-time notification: only a before/after window is available
        msg_parts += [
            "Recycle (After): " + pickup.next_pickup_recycle_after.strftime(NOTIFY_DATE_FORMAT),
            "Recycle (Before): " + pickup.next_pickup_recycle_before.strftime(NOTIFY_DATE_FORMAT),
        ]
    notify_msg = "\n".join(msg_parts)
    print("\n{}\n".format(notify_msg))
    print("Notifying SNS: {}".format(sns_topic.arn))
    sns_topic.publish(
        Subject='Garbage Day Update',
        Message=notify_msg)
    print("Message published to SNS!")
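
# Illustrative summer-time message body produced by the function above
# (hypothetical dates, formatted with NOTIFY_DATE_FORMAT):
#
#   Garbage: Friday 2021-07-16
#   Recycle: Friday 2021-07-23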


def lambda_handler(event, context):
    """
    Detects garbage day changes, persists the latest schedule,
    and sends out notifications
    """
    # Compose the address from the invocation event
    address = RefuseQueryAddress(
        house_number=event['house_number'],
        direction=event['direction'],
        street_name=event['street_name'],
        street_type=event['street_type'])
    print("Querying address: {num} {d} {name} {t}".format(
        num=address.house_number,
        d=address.direction,
        name=address.street_name,
        t=address.street_type))

    # Query for the collection schedule
    pickup = RefuseQuery.Execute(address)

    # Create an S3 resource for fetching/storing persistent data
    s3 = boto3.resource('s3')

    # Attempt to read the pickup information from the previous run
    s3_bucket = event.get('s3_bucket', DEFAULT_S3_BUCKET)
    s3_key = os.path.join(
        event.get('s3_prefix', DEFAULT_S3_PREFIX),
        event.get('s3_key', DEFAULT_S3_KEY)).lstrip('/')
    s3_object = s3.Object(s3_bucket, s3_key)
    last_data = {}
    try:
        print("Loading previous pickup data from s3://{b}/{k}".format(
            b=s3_object.bucket_name,
            k=s3_object.key))
        last_data = json.loads(s3_object.get()['Body'].read().decode('utf-8'))
    except Exception as e:
        # Failed to load the old data for some reason (e.g. a first run with
        # no stored object); ignore it and assume the dates have changed
        print("Failed to load previous pickup data")
        print(e)

    # Overwrite the previous pickup data with the new data
    s3_object.put(Body=json.dumps(pickup.to_dict(), default=json_serial))

    # If the information differs, notify of the changes
    if last_data != pickup.to_dict():
        print("Pickup change detected")
        sns = boto3.resource('sns')
        notify_pickup_change(
            pickup,
            sns_topic=sns.Topic(
                get_sns_topic_arn(event.get('sns_topic', DEFAULT_SNS_TOPIC))))

    # Write a public, cacheable iCalendar (.ics) copy of the schedule to an
    # S3 key derived from the salted hash of the address
    s3_key = os.path.join(
        event.get('s3_prefix', DEFAULT_S3_PREFIX),
        address.get_hash(HASH_SALT) + '.ics')
    s3_object = s3.Object(s3_bucket, s3_key)
    s3_object.put(
        ACL='public-read',
        Body=pickup_to_ics(address, pickup),
        CacheControl="max-age=14400",
    )
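

if __name__ == '__main__':
    # Minimal local-invocation sketch, not part of the Lambda deployment.
    # The address values below are hypothetical, and running this assumes
    # AWS credentials plus the default S3 bucket and SNS topic already exist.
    lambda_handler(
        event={
            'house_number': '200',
            'direction': 'E',
            'street_name': 'WELLS',
            'street_type': 'ST',
        },
        context=None)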