Skip to content

Commit

Permalink
feat: Add lambda_log_level variable
Browse files Browse the repository at this point in the history
  • Loading branch information
baolsen committed Feb 23, 2023
1 parent cf7464d commit a2f6d5a
Show file tree
Hide file tree
Showing 10 changed files with 81 additions and 42 deletions.
1 change: 1 addition & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -52,6 +52,7 @@ Full contributing [guidelines are covered here](.github/contributing.md).
| <a name="input_lambda_deployment_s3_bucket"></a> [lambda\_deployment\_s3\_bucket](#input\_lambda\_deployment\_s3\_bucket) | S3 bucket for lambda deployment package. | `string` | `null` | no |
| <a name="input_lambda_deployment_s3_key"></a> [lambda\_deployment\_s3\_key](#input\_lambda\_deployment\_s3\_key) | S3 object key for lambda deployment package. Otherwise, defaults to `var.naming_prefix/local.deployment_filename`. | `string` | `null` | no |
| <a name="input_lambda_deployment_upload_to_s3_enabled"></a> [lambda\_deployment\_upload\_to\_s3\_enabled](#input\_lambda\_deployment\_upload\_to\_s3\_enabled) | If `true`, the lambda deployment package within this module repo will be copied to S3. If `false` then the S3 object must be uploaded separately. Ignored if `lambda_deployment_s3_bucket` is null. | `bool` | `true` | no |
| <a name="input_lambda_log_level"></a> [lambda\_log\_level](#input\_lambda\_log\_level) | Lambda logging level. One of: `["DEBUG", "INFO", "WARN", "ERROR"]`. | `string` | `"WARN"` | no |
| <a name="input_lambda_memory_size"></a> [lambda\_memory\_size](#input\_lambda\_memory\_size) | The amount of memory for Lambda to use | `number` | `"128"` | no |
| <a name="input_lambda_runtime"></a> [lambda\_runtime](#input\_lambda\_runtime) | The lambda runtime to use. One of: `["python3.9", "python3.8", "python3.7"]` | `string` | `"python3.8"` | no |
| <a name="input_log_retention_in_days"></a> [log\_retention\_in\_days](#input\_log\_retention\_in\_days) | Number of days to keep CloudWatch logs | `number` | `14` | no |
Expand Down
Empty file modified build.sh
100644 → 100755
Empty file.
86 changes: 50 additions & 36 deletions clickopsnotifier/app.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,9 +13,8 @@
from messenger import Messenger
from delivery_stream import DeliveryStream

logger = logging.getLogger("clickopsnotifier")
LOG_LEVEL = os.environ.get("LOG_LEVEL", "INFO")
logger.setLevel(LOG_LEVEL)
LOG_LEVEL = os.environ.get("LOG_LEVEL", "WARN")
logging.getLogger().setLevel(LOG_LEVEL)

WEBHOOK_PARAMETER = os.environ.get("WEBHOOK_PARAMETER", "")
EXCLUDED_ACCOUNTS = json.loads(os.environ.get("EXCLUDED_ACCOUNTS", "[]"))
Expand Down Expand Up @@ -82,7 +81,7 @@ def valid_user(email) -> Tuple[bool, str]:
if email in INCLUDED_USERS:
return True, f"[VU_EXPLICIT_INCLUDE] {email} in {json.dumps(INCLUDED_USERS)}"

print(f"[VU_IMPLICIT_EXCLUDE] {email} not in {json.dumps(INCLUDED_USERS)}")
logging.info(f"[VU_IMPLICIT_EXCLUDE] {email} not in {json.dumps(INCLUDED_USERS)}")
return False


Expand All @@ -94,9 +93,10 @@ def handler_organizational(event, context) -> None: # noqa: C901
:param context: AWS Lambda Context Object
:return: None
"""
logging.debug(f"event={event}")
logging.info(f"event={event}")
if event is None:
raise KeyError("event is None")
sqs_records = event["Records"]

webhook_url = get_webhook()

Expand All @@ -105,34 +105,46 @@ def handler_organizational(event, context) -> None: # noqa: C901
delivery_stream = DeliveryStream(delivery_stream_name=FIREHOSE_DELIVERY_STREAM_NAME)

success = True
for record in event:
# Get the object from the event and show its content type
bucket = record["s3"]["bucket"]["name"]
key = urllib.parse.unquote_plus(record["s3"]["object"]["key"], encoding="utf-8")

key_elements = key.split("/")
if "CloudTrail" not in key_elements:
continue

is_valid_account, reason = valid_account(key)

if not is_valid_account:
continue

response = s3.get_object(Bucket=bucket, Key=key)
content = response["Body"].read()

trail_event_origin = f"{bucket}/{key}"
with gzip.GzipFile(fileobj=io.BytesIO(content), mode="rb") as fh:
trail_event_json = json.load(fh)
for trail_event in trail_event_json["Records"]:
success = success and __handle_event(
messenger=messenger,
delivery_stream=delivery_stream,
trail_event=trail_event,
trail_event_origin=trail_event_origin,
standalone=False,
)

for sqs_record in sqs_records:
logging.info(f"{sqs_record=}")
sqs_record_body = json.loads(sqs_record["body"])
s3_event_records = sqs_record_body.get("Records", [])
for s3_event_record in s3_event_records:
logging.info(f"{s3_event_record=}")
# Get the object from the event and show its content type
bucket = s3_event_record["s3"]["bucket"]["name"]

key = urllib.parse.unquote_plus(
s3_event_record["s3"]["object"]["key"], encoding="utf-8"
)
key_elements = key.split("/")
if "CloudTrail" not in key_elements:
logging.info("Skipping record; CloudTrail is not in the S3 key.")
continue

is_valid_account, reason = valid_account(key)

if not is_valid_account:
logging.info("Skipping record; Not a valid account.")
continue

response = s3.get_object(Bucket=bucket, Key=key)
content = response["Body"].read()

trail_event_origin = f"{bucket}/{key}"
with gzip.GzipFile(fileobj=io.BytesIO(content), mode="rb") as fh:
trail_event_json = json.load(fh)
logging.info(f"{trail_event_json=}")
for trail_event in trail_event_json["Records"]:
success = success and __handle_event(
messenger=messenger,
delivery_stream=delivery_stream,
trail_event=trail_event,
trail_event_origin=trail_event_origin,
standalone=False,
)
logging.info(f"{success=}")

if not success:
logging.error(f"event={json.dumps(event)}")
Expand All @@ -155,7 +167,7 @@ def handler_standalone(event, context) -> None:
event_uncompressed = gzip.decompress(event_decoded_compressed)
event_json = json.loads(event_uncompressed)

# print(event_uncompressed)
# logging.info(event_uncompressed)

success = True
for e in event_json["logEvents"]:
Expand All @@ -176,7 +188,7 @@ def handler_standalone(event, context) -> None:
)

if not success:
print("event_uncompressed:\n\n" + event_uncompressed)
logging.info("event_uncompressed:\n\n" + event_uncompressed)
raise Exception("A problem occurred, please review error logs.")

return "Completed"
Expand All @@ -190,11 +202,13 @@ def __handle_event(
is_valid_user, reason = valid_user(cloudtrail_event.user_email)

if not is_valid_user:
return
logging.info("Skipping; Is not valid user.")
return True

clickops_checker = ClickOpsEventChecker(cloudtrail_event, EXCLUDED_SCOPED_ACTIONS)

is_clickops, reason = clickops_checker.is_clickops()
logging.info(f"{is_clickops=}")

if is_clickops:
result1 = messenger.send(
Expand Down
2 changes: 1 addition & 1 deletion clickopsnotifier/delivery_stream.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@
class FakeClient:
    """No-op stand-in for the Firehose client used when delivery is disabled.

    Accepts batches, logs how many records arrived, and always reports
    success so callers can treat it exactly like the real client.
    """

    def put_record_batch(self, **kwargs):
        """Pretend to deliver a batch; always report zero failed records."""
        batch = kwargs.get("Records", [])
        logging.info(f"FakeClient put_record_batch: {len(batch)} records")
        # Mirror the shape of the real PutRecordBatch response.
        return {"FailedPutCount": 0}


Expand Down
16 changes: 12 additions & 4 deletions clickopsnotifier/messenger.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
import json
import requests
import logging


class Messenger:
Expand Down Expand Up @@ -46,15 +47,22 @@ def __send_msteams_message(
}
],
}

response = requests.post(self.webhook, json=payload)
if response.status_code != 200:
logging.info(f"json payload:\n\n{json.dumps(payload)}")
logging.error(f"response.content={response.content}")
return False
return True

def __send_slack_message(
self, user, trail_event, trail_event_origin: str, standalone: bool
) -> bool:
# Maximum length for a section block is 3k so truncate to 2900
formatted_event = json.dumps(trail_event, indent=2)
if len(formatted_event) < 2900:
formatted_event = f"*Event*\n```{formatted_event}```"
else:
formatted_event = f"*Event (truncated)*\n```{formatted_event[:2900]}```"
payload = {
"blocks": [
{
Expand Down Expand Up @@ -109,14 +117,14 @@ def __send_slack_message(
"type": "section",
"text": {
"type": "mrkdwn",
"text": f"*Event*\n```{json.dumps(trail_event, indent=2)}```",
"text": formatted_event,
},
},
]
}

response = requests.post(self.webhook, json=payload)
if response.status_code != 200:
print(response.content)
logging.info(f"json payload:\n\n{json.dumps(payload)}")
logging.error(f"response.content={response.content}")
return False
return True
Binary file modified deployment-clickopsnotifier-python3.7.zip
Binary file not shown.
Binary file modified deployment-clickopsnotifier-python3.8.zip
Binary file not shown.
Binary file modified deployment-clickopsnotifier-python3.9.zip
Binary file not shown.
2 changes: 1 addition & 1 deletion main.tf
Original file line number Diff line number Diff line change
Expand Up @@ -114,7 +114,7 @@ module "clickops_notifier_lambda" {

MESSAGE_FORMAT = var.message_format

LOG_LEVEL = "INFO"
LOG_LEVEL = var.lambda_log_level

FIREHOSE_DELIVERY_STREAM_NAME = coalesce(var.firehose_delivery_stream_name, "__NONE__")
}
Expand Down
16 changes: 16 additions & 0 deletions variables.tf
Original file line number Diff line number Diff line change
Expand Up @@ -166,6 +166,22 @@ variable "lambda_memory_size" {
default = "128"
}

# Logging level passed to the notifier Lambda via the LOG_LEVEL environment
# variable (see main.tf). Values are Python logging level names; note that
# "WARN" is the (accepted) alias of Python's "WARNING" — presumably chosen
# to match CloudWatch conventions; confirm against the Lambda code.
variable "lambda_log_level" {
description = "Lambda logging level. One of: `[\"DEBUG\", \"INFO\", \"WARN\", \"ERROR\"]`."
type = string
default = "WARN"

# Reject anything outside the documented set at plan time rather than
# letting an unknown level reach the Lambda at runtime.
validation {
condition = contains([
"DEBUG",
"INFO",
"WARN",
"ERROR"
], var.lambda_log_level)
error_message = "Invalid lambda_log_level provided."
}
}

# IAM configuration
variable "create_iam_role" {
description = "Determines whether an IAM role is created or to use an existing IAM role"
Expand Down

0 comments on commit a2f6d5a

Please sign in to comment.