Date: Sat, 5 Jun 2021 08:50:37 -0700
Subject: [PATCH 2/6] feat(data-classes): decorator to instantiate data_classes
and docs updates (#442)
---
.../utilities/data_classes/__init__.py | 2 +
.../utilities/data_classes/alb_event.py | 2 +
.../utilities/data_classes/event_source.py | 39 ++
.../utilities/idempotency/persistence/base.py | 1 +
docs/utilities/data_classes.md | 456 +++++++++---------
.../idempotency/test_idempotency.py | 43 +-
tests/functional/test_data_classes.py | 13 +
7 files changed, 339 insertions(+), 217 deletions(-)
create mode 100644 aws_lambda_powertools/utilities/data_classes/event_source.py
diff --git a/aws_lambda_powertools/utilities/data_classes/__init__.py b/aws_lambda_powertools/utilities/data_classes/__init__.py
index a47c32ee07f..c5391880122 100644
--- a/aws_lambda_powertools/utilities/data_classes/__init__.py
+++ b/aws_lambda_powertools/utilities/data_classes/__init__.py
@@ -10,6 +10,7 @@
from .connect_contact_flow_event import ConnectContactFlowEvent
from .dynamo_db_stream_event import DynamoDBStreamEvent
from .event_bridge_event import EventBridgeEvent
+from .event_source import event_source
from .kinesis_stream_event import KinesisStreamEvent
from .s3_event import S3Event
from .ses_event import SESEvent
@@ -31,4 +32,5 @@
"SESEvent",
"SNSEvent",
"SQSEvent",
+ "event_source",
]
diff --git a/aws_lambda_powertools/utilities/data_classes/alb_event.py b/aws_lambda_powertools/utilities/data_classes/alb_event.py
index 73e064d0f26..159779c86a7 100644
--- a/aws_lambda_powertools/utilities/data_classes/alb_event.py
+++ b/aws_lambda_powertools/utilities/data_classes/alb_event.py
@@ -6,6 +6,7 @@
class ALBEventRequestContext(DictWrapper):
@property
def elb_target_group_arn(self) -> str:
+ """Target group arn for your Lambda function"""
return self["requestContext"]["elb"]["targetGroupArn"]
@@ -15,6 +16,7 @@ class ALBEvent(BaseProxyEvent):
Documentation:
--------------
- https://docs.aws.amazon.com/lambda/latest/dg/services-alb.html
+ - https://docs.aws.amazon.com/elasticloadbalancing/latest/application/lambda-functions.html
"""
@property
diff --git a/aws_lambda_powertools/utilities/data_classes/event_source.py b/aws_lambda_powertools/utilities/data_classes/event_source.py
new file mode 100644
index 00000000000..3968f923573
--- /dev/null
+++ b/aws_lambda_powertools/utilities/data_classes/event_source.py
@@ -0,0 +1,39 @@
+from typing import Any, Callable, Dict, Type
+
+from aws_lambda_powertools.middleware_factory import lambda_handler_decorator
+from aws_lambda_powertools.utilities.data_classes.common import DictWrapper
+from aws_lambda_powertools.utilities.typing import LambdaContext
+
+
+@lambda_handler_decorator
+def event_source(
+ handler: Callable[[Any, LambdaContext], Any],
+ event: Dict[str, Any],
+ context: LambdaContext,
+ data_class: Type[DictWrapper],
+):
+ """Middleware to create an instance of the passed in event source data class
+
+ Parameters
+ ----------
+ handler: Callable
+ Lambda's handler
+ event: Dict
+ Lambda's Event
+ context: Dict
+ Lambda's Context
+ data_class: Type[DictWrapper]
+ Data class type to instantiate
+
+ Example
+ --------
+
+ **Sample usage**
+
+ from aws_lambda_powertools.utilities.data_classes import S3Event, event_source
+
+ @event_source(data_class=S3Event)
+ def handler(event: S3Event, context):
+ return {"key": event.object_key}
+ """
+ return handler(data_class(event), context)
diff --git a/aws_lambda_powertools/utilities/idempotency/persistence/base.py b/aws_lambda_powertools/utilities/idempotency/persistence/base.py
index 0cbd34213c1..31aef6dc0f2 100644
--- a/aws_lambda_powertools/utilities/idempotency/persistence/base.py
+++ b/aws_lambda_powertools/utilities/idempotency/persistence/base.py
@@ -224,6 +224,7 @@ def _generate_hash(self, data: Any) -> str:
Hashed representation of the provided data
"""
+ data = getattr(data, "raw_event", data) # could be a data class depending on decorator order
hashed_data = self.hash_function(json.dumps(data, cls=Encoder).encode())
return hashed_data.hexdigest()
diff --git a/docs/utilities/data_classes.md b/docs/utilities/data_classes.md
index 0fc33d3a3f7..5b0d0db8c0a 100644
--- a/docs/utilities/data_classes.md
+++ b/docs/utilities/data_classes.md
@@ -21,22 +21,35 @@ Lambda function.
### Utilizing the data classes
-The classes are initialized by passing in the Lambda event object into the constructor of the appropriate data class.
+The classes are initialized by passing in the Lambda event object into the constructor of the appropriate data class or
+by using the `event_source` decorator.
For example, if your Lambda function is being triggered by an API Gateway proxy integration, you can use the
`APIGatewayProxyEvent` class.
=== "app.py"
- ```python hl_lines="1 4"
- from aws_lambda_powertools.utilities.data_classes import APIGatewayProxyEvent
+```python hl_lines="1 4"
+from aws_lambda_powertools.utilities.data_classes import APIGatewayProxyEvent
- def lambda_handler(event, context):
- event: APIGatewayProxyEvent = APIGatewayProxyEvent(event)
+def lambda_handler(event: dict, context):
+ event = APIGatewayProxyEvent(event)
+ if 'helloworld' in event.path and event.http_method == 'GET':
+ do_something_with(event.body, user)
+```
- if 'helloworld' in event.path and event.http_method == 'GET':
- do_something_with(event.body, user)
- ```
+Same example as above, but using the `event_source` decorator
+
+=== "app.py"
+
+```python hl_lines="1 3"
+from aws_lambda_powertools.utilities.data_classes import event_source, APIGatewayProxyEvent
+
+@event_source(data_class=APIGatewayProxyEvent)
+def lambda_handler(event: APIGatewayProxyEvent, context):
+ if 'helloworld' in event.path and event.http_method == 'GET':
+ do_something_with(event.body, user)
+```
**Autocomplete with self-documented properties and methods**
@@ -49,7 +62,8 @@ For example, if your Lambda function is being triggered by an API Gateway proxy
Event Source | Data_class
------------------------------------------------- | ---------------------------------------------------------------------------------
[API Gateway Proxy](#api-gateway-proxy) | `APIGatewayProxyEvent`
-[API Gateway Proxy event v2](#api-gateway-proxy-v2) | `APIGatewayProxyEventV2`
+[API Gateway Proxy V2](#api-gateway-proxy-v2) | `APIGatewayProxyEventV2`
+[Application Load Balancer](#application-load-balancer) | `ALBEvent`
[AppSync Resolver](#appsync-resolver) | `AppSyncResolverEvent`
[CloudWatch Logs](#cloudwatch-logs) | `CloudWatchLogsEvent`
[CodePipeline Job Event](#codepipeline-job) | `CodePipelineJobEvent`
@@ -76,34 +90,47 @@ It is used for either API Gateway REST API or HTTP API using v1 proxy event.
=== "app.py"
- ```python
- from aws_lambda_powertools.utilities.data_classes import APIGatewayProxyEvent
+```python
+from aws_lambda_powertools.utilities.data_classes import event_source, APIGatewayProxyEvent
- def lambda_handler(event, context):
- event: APIGatewayProxyEvent = APIGatewayProxyEvent(event)
+@event_source(data_class=APIGatewayProxyEvent)
+def lambda_handler(event: APIGatewayProxyEvent, context):
+ if "helloworld" in event.path and event.http_method == "GET":
request_context = event.request_context
identity = request_context.identity
+ user = identity.user
+ do_something_with(event.json_body, user)
+```
- if 'helloworld' in event.path and event.http_method == 'GET':
- user = identity.user
- do_something_with(event.body, user)
- ```
+### API Gateway Proxy V2
-### API Gateway Proxy v2
+It is used for HTTP API using v2 proxy event.
=== "app.py"
- ```python
- from aws_lambda_powertools.utilities.data_classes import APIGatewayProxyEventV2
+```python
+from aws_lambda_powertools.utilities.data_classes import event_source, APIGatewayProxyEventV2
- def lambda_handler(event, context):
- event: APIGatewayProxyEventV2 = APIGatewayProxyEventV2(event)
- request_context = event.request_context
- query_string_parameters = event.query_string_parameters
+@event_source(data_class=APIGatewayProxyEventV2)
+def lambda_handler(event: APIGatewayProxyEventV2, context):
+ if "helloworld" in event.path and event.http_method == "POST":
+ do_something_with(event.json_body, event.query_string_parameters)
+```
- if 'helloworld' in event.raw_path and request_context.http.method == 'POST':
- do_something_with(event.body, query_string_parameters)
- ```
+### Application Load Balancer
+
+It is used for Application Load Balancer events.
+
+=== "app.py"
+
+```python
+from aws_lambda_powertools.utilities.data_classes import event_source, ALBEvent
+
+@event_source(data_class=ALBEvent)
+def lambda_handler(event: ALBEvent, context):
+ if "helloworld" in event.path and event.http_method == "POST":
+ do_something_with(event.json_body, event.query_string_parameters)
+```
### AppSync Resolver
@@ -210,18 +237,17 @@ decompress and parse json data from the event.
=== "app.py"
- ```python
- from aws_lambda_powertools.utilities.data_classes import CloudWatchLogsEvent
- from aws_lambda_powertools.utilities.data_classes.cloud_watch_logs_event import CloudWatchLogsDecodedData
+```python
+from aws_lambda_powertools.utilities.data_classes import event_source, CloudWatchLogsEvent
+from aws_lambda_powertools.utilities.data_classes.cloud_watch_logs_event import CloudWatchLogsDecodedData
- def lambda_handler(event, context):
- event: CloudWatchLogsEvent = CloudWatchLogsEvent(event)
-
- decompressed_log: CloudWatchLogsDecodedData = event.parse_logs_data
- log_events = decompressed_log.log_events
- for event in log_events:
- do_something_with(event.timestamp, event.message)
- ```
+@event_source(data_class=CloudWatchLogsEvent)
+def lambda_handler(event: CloudWatchLogsEvent, context):
+ decompressed_log: CloudWatchLogsDecodedData = event.parse_logs_data
+ log_events = decompressed_log.log_events
+ for event in log_events:
+ do_something_with(event.timestamp, event.message)
+```
### CodePipeline Job
@@ -229,51 +255,50 @@ Data classes and utility functions to help create continuous delivery pipelines
=== "app.py"
- ```python
- from aws_lambda_powertools import Logger
- from aws_lambda_powertools.utilities.data_classes import CodePipelineJobEvent
+```python
+from aws_lambda_powertools import Logger
+from aws_lambda_powertools.utilities.data_classes import event_source, CodePipelineJobEvent
- logger = Logger()
+logger = Logger()
+@event_source(data_class=CodePipelineJobEvent)
+def lambda_handler(event, context):
+ """The Lambda function handler
- def lambda_handler(event, context):
- """The Lambda function handler
-
- If a continuing job then checks the CloudFormation stack status
- and updates the job accordingly.
-
- If a new job then kick of an update or creation of the target
- CloudFormation stack.
- """
- event: CodePipelineJobEvent = CodePipelineJobEvent(event)
-
- # Extract the Job ID
- job_id = event.get_id
-
- # Extract the params
- params: dict = event.decoded_user_parameters
- stack = params["stack"]
- artifact_name = params["artifact"]
- template_file = params["file"]
-
- try:
- if event.data.continuation_token:
- # If we're continuing then the create/update has already been triggered
- # we just need to check if it has finished.
- check_stack_update_status(job_id, stack)
- else:
- template = event.get_artifact(artifact_name, template_file)
- # Kick off a stack update or create
- start_update_or_create(job_id, stack, template)
- except Exception as e:
- # If any other exceptions which we didn't expect are raised
- # then fail the job and log the exception message.
- logger.exception("Function failed due to exception.")
- put_job_failure(job_id, "Function exception: " + str(e))
-
- logger.debug("Function complete.")
- return "Complete."
- ```
+ If a continuing job then checks the CloudFormation stack status
+ and updates the job accordingly.
+
+ If a new job then kick off an update or creation of the target
+ CloudFormation stack.
+ """
+
+ # Extract the Job ID
+ job_id = event.get_id
+
+ # Extract the params
+ params: dict = event.decoded_user_parameters
+ stack = params["stack"]
+ artifact_name = params["artifact"]
+ template_file = params["file"]
+
+ try:
+ if event.data.continuation_token:
+ # If we're continuing then the create/update has already been triggered
+ # we just need to check if it has finished.
+ check_stack_update_status(job_id, stack)
+ else:
+ template = event.get_artifact(artifact_name, template_file)
+ # Kick off a stack update or create
+ start_update_or_create(job_id, stack, template)
+ except Exception as e:
+ # If any other exceptions which we didn't expect are raised
+ # then fail the job and log the exception message.
+ logger.exception("Function failed due to exception.")
+ put_job_failure(job_id, "Function exception: " + str(e))
+
+ logger.debug("Function complete.")
+ return "Complete."
+```
### Cognito User Pool
@@ -297,15 +322,15 @@ Verify Auth Challenge | `data_classes.cognito_user_pool_event.VerifyAuthChalleng
=== "app.py"
- ```python
- from aws_lambda_powertools.utilities.data_classes.cognito_user_pool_event import PostConfirmationTriggerEvent
+```python
+from aws_lambda_powertools.utilities.data_classes.cognito_user_pool_event import PostConfirmationTriggerEvent
- def lambda_handler(event, context):
- event: PostConfirmationTriggerEvent = PostConfirmationTriggerEvent(event)
+def lambda_handler(event, context):
+ event: PostConfirmationTriggerEvent = PostConfirmationTriggerEvent(event)
- user_attributes = event.request.user_attributes
- do_something_with(user_attributes)
- ```
+ user_attributes = event.request.user_attributes
+ do_something_with(user_attributes)
+```
#### Define Auth Challenge Example
@@ -470,17 +495,18 @@ This example is based on the AWS Cognito docs for [Create Auth Challenge Lambda
=== "app.py"
- ```python
- from aws_lambda_powertools.utilities.data_classes.cognito_user_pool_event import CreateAuthChallengeTriggerEvent
+```python
+from aws_lambda_powertools.utilities.data_classes import event_source
+from aws_lambda_powertools.utilities.data_classes.cognito_user_pool_event import CreateAuthChallengeTriggerEvent
- def handler(event: dict, context) -> dict:
- event: CreateAuthChallengeTriggerEvent = CreateAuthChallengeTriggerEvent(event)
- if event.request.challenge_name == "CUSTOM_CHALLENGE":
- event.response.public_challenge_parameters = {"captchaUrl": "url/123.jpg"}
- event.response.private_challenge_parameters = {"answer": "5"}
- event.response.challenge_metadata = "CAPTCHA_CHALLENGE"
- return event.raw_event
- ```
+@event_source(data_class=CreateAuthChallengeTriggerEvent)
+def handler(event: CreateAuthChallengeTriggerEvent, context) -> dict:
+ if event.request.challenge_name == "CUSTOM_CHALLENGE":
+ event.response.public_challenge_parameters = {"captchaUrl": "url/123.jpg"}
+ event.response.private_challenge_parameters = {"answer": "5"}
+ event.response.challenge_metadata = "CAPTCHA_CHALLENGE"
+ return event.raw_event
+```
#### Verify Auth Challenge Response Example
@@ -488,16 +514,17 @@ This example is based on the AWS Cognito docs for [Verify Auth Challenge Respons
=== "app.py"
- ```python
- from aws_lambda_powertools.utilities.data_classes.cognito_user_pool_event import VerifyAuthChallengeResponseTriggerEvent
+```python
+from aws_lambda_powertools.utilities.data_classes import event_source
+from aws_lambda_powertools.utilities.data_classes.cognito_user_pool_event import VerifyAuthChallengeResponseTriggerEvent
- def handler(event: dict, context) -> dict:
- event: VerifyAuthChallengeResponseTriggerEvent = VerifyAuthChallengeResponseTriggerEvent(event)
- event.response.answer_correct = (
- event.request.private_challenge_parameters.get("answer") == event.request.challenge_answer
- )
- return event.raw_event
- ```
+@event_source(data_class=VerifyAuthChallengeResponseTriggerEvent)
+def handler(event: VerifyAuthChallengeResponseTriggerEvent, context) -> dict:
+ event.response.answer_correct = (
+ event.request.private_challenge_parameters.get("answer") == event.request.challenge_answer
+ )
+ return event.raw_event
+```
### Connect Contact Flow
@@ -505,21 +532,21 @@ This example is based on the AWS Cognito docs for [Verify Auth Challenge Respons
=== "app.py"
- ```python
- from aws_lambda_powertools.utilities.data_classes.connect_contact_flow_event import (
- ConnectContactFlowChannel,
- ConnectContactFlowEndpointType,
- ConnectContactFlowEvent,
- ConnectContactFlowInitiationMethod,
- )
-
- def lambda_handler(event, context):
- event: ConnectContactFlowEvent = ConnectContactFlowEvent(event)
- assert event.contact_data.attributes == {"Language": "en-US"}
- assert event.contact_data.channel == ConnectContactFlowChannel.VOICE
- assert event.contact_data.customer_endpoint.endpoint_type == ConnectContactFlowEndpointType.TELEPHONE_NUMBER
- assert event.contact_data.initiation_method == ConnectContactFlowInitiationMethod.API
- ```
+```python
+from aws_lambda_powertools.utilities.data_classes.connect_contact_flow_event import (
+ ConnectContactFlowChannel,
+ ConnectContactFlowEndpointType,
+ ConnectContactFlowEvent,
+ ConnectContactFlowInitiationMethod,
+)
+
+def lambda_handler(event, context):
+ event: ConnectContactFlowEvent = ConnectContactFlowEvent(event)
+ assert event.contact_data.attributes == {"Language": "en-US"}
+ assert event.contact_data.channel == ConnectContactFlowChannel.VOICE
+ assert event.contact_data.customer_endpoint.endpoint_type == ConnectContactFlowEndpointType.TELEPHONE_NUMBER
+ assert event.contact_data.initiation_method == ConnectContactFlowInitiationMethod.API
+```
### DynamoDB Streams
@@ -529,34 +556,34 @@ attributes values (`AttributeValue`), as well as enums for stream view type (`St
=== "app.py"
- ```python
- from aws_lambda_powertools.utilities.data_classes.dynamo_db_stream_event import (
- DynamoDBStreamEvent,
- DynamoDBRecordEventName
- )
+```python
+from aws_lambda_powertools.utilities.data_classes.dynamo_db_stream_event import (
+ DynamoDBStreamEvent,
+ DynamoDBRecordEventName
+)
- def lambda_handler(event, context):
- event: DynamoDBStreamEvent = DynamoDBStreamEvent(event)
+def lambda_handler(event, context):
+ event: DynamoDBStreamEvent = DynamoDBStreamEvent(event)
- # Multiple records can be delivered in a single event
- for record in event.records:
- if record.event_name == DynamoDBRecordEventName.MODIFY:
- do_something_with(record.dynamodb.new_image)
- do_something_with(record.dynamodb.old_image)
- ```
+ # Multiple records can be delivered in a single event
+ for record in event.records:
+ if record.event_name == DynamoDBRecordEventName.MODIFY:
+ do_something_with(record.dynamodb.new_image)
+ do_something_with(record.dynamodb.old_image)
+```
### EventBridge
=== "app.py"
- ```python
- from aws_lambda_powertools.utilities.data_classes import EventBridgeEvent
+```python
+from aws_lambda_powertools.utilities.data_classes import event_source, EventBridgeEvent
- def lambda_handler(event, context):
- event: EventBridgeEvent = EventBridgeEvent(event)
- do_something_with(event.detail)
+@event_source(data_class=EventBridgeEvent)
+def lambda_handler(event: EventBridgeEvent, context):
+ do_something_with(event.detail)
- ```
+```
### Kinesis streams
@@ -565,40 +592,40 @@ or plain text, depending on the original payload.
=== "app.py"
- ```python
- from aws_lambda_powertools.utilities.data_classes import KinesisStreamEvent
+```python
+from aws_lambda_powertools.utilities.data_classes import event_source, KinesisStreamEvent
- def lambda_handler(event, context):
- event: KinesisStreamEvent = KinesisStreamEvent(event)
- kinesis_record = next(event.records).kinesis
+@event_source(data_class=KinesisStreamEvent)
+def lambda_handler(event: KinesisStreamEvent, context):
+ kinesis_record = next(event.records).kinesis
- # if data was delivered as text
- data = kinesis_record.data_as_text()
+ # if data was delivered as text
+ data = kinesis_record.data_as_text()
- # if data was delivered as json
- data = kinesis_record.data_as_json()
+ # if data was delivered as json
+ data = kinesis_record.data_as_json()
- do_something_with(data)
- ```
+ do_something_with(data)
+```
### S3
=== "app.py"
- ```python
- from urllib.parse import unquote_plus
- from aws_lambda_powertools.utilities.data_classes import S3Event
+```python
+from urllib.parse import unquote_plus
+from aws_lambda_powertools.utilities.data_classes import event_source, S3Event
- def lambda_handler(event, context):
- event: S3Event = S3Event(event)
- bucket_name = event.bucket_name
+@event_source(data_class=S3Event)
+def lambda_handler(event: S3Event, context):
+ bucket_name = event.bucket_name
- # Multiple records can be delivered in a single event
- for record in event.records:
- object_key = unquote_plus(record.s3.get_object.key)
+ # Multiple records can be delivered in a single event
+ for record in event.records:
+ object_key = unquote_plus(record.s3.get_object.key)
- do_something_with(f'{bucket_name}/{object_key}')
- ```
+ do_something_with(f"{bucket_name}/{object_key}")
+```
### S3 Object Lambda
@@ -606,84 +633,81 @@ This example is based on the AWS Blog post [Introducing Amazon S3 Object Lambda
=== "app.py"
- ```python hl_lines="5-6 12 14"
- import boto3
- import requests
+```python hl_lines="5-6 12 14"
+import boto3
+import requests
- from aws_lambda_powertools import Logger
- from aws_lambda_powertools.logging.correlation_paths import S3_OBJECT_LAMBDA
- from aws_lambda_powertools.utilities.data_classes.s3_object_event import S3ObjectLambdaEvent
+from aws_lambda_powertools import Logger
+from aws_lambda_powertools.logging.correlation_paths import S3_OBJECT_LAMBDA
+from aws_lambda_powertools.utilities.data_classes.s3_object_event import S3ObjectLambdaEvent
- logger = Logger()
- session = boto3.Session()
- s3 = session.client("s3")
+logger = Logger()
+session = boto3.Session()
+s3 = session.client("s3")
- @logger.inject_lambda_context(correlation_id_path=S3_OBJECT_LAMBDA, log_event=True)
- def lambda_handler(event, context):
- event = S3ObjectLambdaEvent(event)
+@logger.inject_lambda_context(correlation_id_path=S3_OBJECT_LAMBDA, log_event=True)
+def lambda_handler(event, context):
+ event = S3ObjectLambdaEvent(event)
- # Get object from S3
- response = requests.get(event.input_s3_url)
- original_object = response.content.decode("utf-8")
+ # Get object from S3
+ response = requests.get(event.input_s3_url)
+ original_object = response.content.decode("utf-8")
- # Make changes to the object about to be returned
- transformed_object = original_object.upper()
+ # Make changes to the object about to be returned
+ transformed_object = original_object.upper()
- # Write object back to S3 Object Lambda
- s3.write_get_object_response(
- Body=transformed_object, RequestRoute=event.request_route, RequestToken=event.request_token
- )
+ # Write object back to S3 Object Lambda
+ s3.write_get_object_response(
+ Body=transformed_object, RequestRoute=event.request_route, RequestToken=event.request_token
+ )
- return {"status_code": 200}
- ```
+ return {"status_code": 200}
+```
### SES
=== "app.py"
- ```python
- from aws_lambda_powertools.utilities.data_classes import SESEvent
+```python
+from aws_lambda_powertools.utilities.data_classes import event_source, SESEvent
- def lambda_handler(event, context):
- event: SESEvent = SESEvent(event)
+@event_source(data_class=SESEvent)
+def lambda_handler(event: SESEvent, context):
+ # Multiple records can be delivered in a single event
+ for record in event.records:
+ mail = record.ses.mail
+ common_headers = mail.common_headers
- # Multiple records can be delivered in a single event
- for record in event.records:
- mail = record.ses.mail
- common_headers = mail.common_headers
-
- do_something_with(common_headers.to, common_headers.subject)
- ```
+ do_something_with(common_headers.to, common_headers.subject)
+```
### SNS
=== "app.py"
- ```python
- from aws_lambda_powertools.utilities.data_classes import SNSEvent
+```python
+from aws_lambda_powertools.utilities.data_classes import event_source, SNSEvent
- def lambda_handler(event, context):
- event: SNSEvent = SNSEvent(event)
+@event_source(data_class=SNSEvent)
+def lambda_handler(event: SNSEvent, context):
+ # Multiple records can be delivered in a single event
+ for record in event.records:
+ message = record.sns.message
+ subject = record.sns.subject
- # Multiple records can be delivered in a single event
- for record in event.records:
- message = record.sns.message
- subject = record.sns.subject
-
- do_something_with(subject, message)
- ```
+ do_something_with(subject, message)
+```
### SQS
=== "app.py"
- ```python
- from aws_lambda_powertools.utilities.data_classes import SQSEvent
-
- def lambda_handler(event, context):
- event: SQSEvent = SQSEvent(event)
+```python
+from aws_lambda_powertools.utilities.data_classes import event_source, SQSEvent
- # Multiple records can be delivered in a single event
- for record in event.records:
- do_something_with(record.body)
- ```
+@event_source(data_class=SQSEvent)
+def lambda_handler(event: SQSEvent, context):
+ # Multiple records can be delivered in a single event
+ for record in event.records:
+ do_something_with(record.body)
+```
diff --git a/tests/functional/idempotency/test_idempotency.py b/tests/functional/idempotency/test_idempotency.py
index 25f76af48be..0cf19ab9de0 100644
--- a/tests/functional/idempotency/test_idempotency.py
+++ b/tests/functional/idempotency/test_idempotency.py
@@ -1,4 +1,5 @@
import copy
+import hashlib
import json
import sys
from hashlib import md5
@@ -7,6 +8,7 @@
import pytest
from botocore import stub
+from aws_lambda_powertools.utilities.data_classes import APIGatewayProxyEventV2, event_source
from aws_lambda_powertools.utilities.idempotency import DynamoDBPersistenceLayer, IdempotencyConfig
from aws_lambda_powertools.utilities.idempotency.exceptions import (
IdempotencyAlreadyInProgressError,
@@ -19,6 +21,7 @@
from aws_lambda_powertools.utilities.idempotency.idempotency import idempotent
from aws_lambda_powertools.utilities.idempotency.persistence.base import BasePersistenceLayer, DataRecord
from aws_lambda_powertools.utilities.validation import envelopes, validator
+from tests.functional.utils import load_event
TABLE_NAME = "TEST_TABLE"
@@ -223,7 +226,7 @@ def lambda_handler(event, context):
def test_idempotent_lambda_first_execution_cached(
idempotency_config: IdempotencyConfig,
persistence_store: DynamoDBPersistenceLayer,
- lambda_apigw_event: DynamoDBPersistenceLayer,
+ lambda_apigw_event,
expected_params_update_item,
expected_params_put_item,
lambda_response,
@@ -845,3 +848,41 @@ def handler(event, context):
handler({}, lambda_context)
assert "No data found to create a hashed idempotency_key" == e.value.args[0]
+
+
+class MockPersistenceLayer(BasePersistenceLayer):
+ def __init__(self, expected_idempotency_key: str):
+ self.expected_idempotency_key = expected_idempotency_key
+ super(MockPersistenceLayer, self).__init__()
+
+ def _put_record(self, data_record: DataRecord) -> None:
+ assert data_record.idempotency_key == self.expected_idempotency_key
+
+ def _update_record(self, data_record: DataRecord) -> None:
+ assert data_record.idempotency_key == self.expected_idempotency_key
+
+ def _get_record(self, idempotency_key) -> DataRecord:
+ ...
+
+ def _delete_record(self, data_record: DataRecord) -> None:
+ ...
+
+
+def test_idempotent_lambda_event_source(lambda_context):
+ # Scenario to validate that we can use the event_source decorator before or after the idempotent decorator
+ mock_event = load_event("apiGatewayProxyV2Event.json")
+ persistence_layer = MockPersistenceLayer("test-func#" + hashlib.md5(json.dumps(mock_event).encode()).hexdigest())
+ expected_result = {"message": "Foo"}
+
+ # GIVEN an event_source decorator
+ # AND then an idempotent decorator
+ @event_source(data_class=APIGatewayProxyEventV2)
+ @idempotent(persistence_store=persistence_layer)
+ def lambda_handler(event, _):
+ assert isinstance(event, APIGatewayProxyEventV2)
+ return expected_result
+
+ # WHEN calling the lambda handler
+ result = lambda_handler(mock_event, lambda_context)
+ # THEN we expect the handler to execute successfully
+ assert result == expected_result
diff --git a/tests/functional/test_data_classes.py b/tests/functional/test_data_classes.py
index 07648f84ee9..60dfc591897 100644
--- a/tests/functional/test_data_classes.py
+++ b/tests/functional/test_data_classes.py
@@ -62,6 +62,7 @@
DynamoDBStreamEvent,
StreamViewType,
)
+from aws_lambda_powertools.utilities.data_classes.event_source import event_source
from aws_lambda_powertools.utilities.data_classes.s3_object_event import S3ObjectLambdaEvent
from tests.functional.utils import load_event
@@ -1237,3 +1238,15 @@ def download_file(bucket: str, key: str, tmp_name: str):
}
)
assert artifact_str == file_contents
+
+
+def test_reflected_types():
+ # GIVEN an event_source decorator
+ @event_source(data_class=APIGatewayProxyEventV2)
+ def lambda_handler(event: APIGatewayProxyEventV2, _):
+ # THEN we expect the event to be of the passed in data class type
+ assert isinstance(event, APIGatewayProxyEventV2)
+ assert event.get_header_value("x-foo") == "Foo"
+
+ # WHEN calling the lambda handler
+ lambda_handler({"headers": {"X-Foo": "Foo"}}, None)
From 6465d9ee11ead2b348bd5fca05f4f07ea80bc804 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Mon, 7 Jun 2021 08:16:50 +0000
Subject: [PATCH 3/6] chore(deps-dev): bump mkdocs-material from 7.1.6 to 7.1.7
(#464)
Bumps [mkdocs-material](https://github.com/squidfunk/mkdocs-material) from 7.1.6 to 7.1.7.
Release notes
Sourced from mkdocs-material's releases.
mkdocs-material-7.1.7
- Improved screen reader support
Changelog
Sourced from mkdocs-material's changelog.
7.1.7 _ June 6, 2021
- Improved screen reader support
Commits
3d6cbc9
Updated dependencies
7485a61
Prepare 7.1.7 release
11fcf4f
Added aria labels to language and version selector
2a5ba14
Added aria-label to language selector button (#2725)
42500aa
Added aria labels to palette toggles
0dd4831
Added aria labels to footer links
a201390
Formatting
db83978
Updated note on now removed Docker image for Insiders
86f49eb
Updated dependencies
85d9a56
Updated Insiders changelog
- Additional commits viewable in compare view
[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=mkdocs-material&package-manager=pip&previous-version=7.1.6&new-version=7.1.7)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)
Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`.
[//]: # (dependabot-automerge-start)
[//]: # (dependabot-automerge-end)
---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR:
- `@dependabot rebase` will rebase this PR
- `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it
- `@dependabot merge` will merge this PR after your CI passes on it
- `@dependabot squash and merge` will squash and merge this PR after your CI passes on it
- `@dependabot cancel merge` will cancel a previously requested merge and block automerging
- `@dependabot reopen` will reopen this PR if it is closed
- `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually
- `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself)
- `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself)
- `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
---
poetry.lock | 8 ++++----
pyproject.toml | 2 +-
2 files changed, 5 insertions(+), 5 deletions(-)
diff --git a/poetry.lock b/poetry.lock
index fe8d75b0905..7d7537a0dc7 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -591,7 +591,7 @@ mkdocs = ">=0.17"
[[package]]
name = "mkdocs-material"
-version = "7.1.6"
+version = "7.1.7"
description = "A Material Design theme for MkDocs"
category = "dev"
optional = false
@@ -1065,7 +1065,7 @@ pydantic = ["pydantic", "email-validator"]
[metadata]
lock-version = "1.1"
python-versions = "^3.6.1"
-content-hash = "a207f2be8a6e01d19062b6293e455b0b318d45ac96cf562c9fcdf8fcc7cdf5ff"
+content-hash = "3159635f02dd232e8271d6fd4f6b1b92cefb6f8b8ada60bda6929f3839515862"
[metadata.files]
appdirs = [
@@ -1342,8 +1342,8 @@ mkdocs-git-revision-date-plugin = [
{file = "mkdocs_git_revision_date_plugin-0.3.1-py3-none-any.whl", hash = "sha256:8ae50b45eb75d07b150a69726041860801615aae5f4adbd6b1cf4d51abaa03d5"},
]
mkdocs-material = [
- {file = "mkdocs-material-7.1.6.tar.gz", hash = "sha256:b3f1aaea3e79e3c3b30babe0238915cf4ad4c4560d404bb0ac3298ee2ce004a3"},
- {file = "mkdocs_material-7.1.6-py2.py3-none-any.whl", hash = "sha256:01566c460990dad54d6ec935553b9c5c8e4e753ac3e30ba0945ceeff4ad164ac"},
+ {file = "mkdocs-material-7.1.7.tar.gz", hash = "sha256:34d57af1e3e68ff4251feb82ced70545d8aa6064861ba76b1a15928399d21879"},
+ {file = "mkdocs_material-7.1.7-py2.py3-none-any.whl", hash = "sha256:1725d02efed5d989258fd1620673e78a7171f82028f30c2da8d21e7539150221"},
]
mkdocs-material-extensions = [
{file = "mkdocs-material-extensions-1.0.1.tar.gz", hash = "sha256:6947fb7f5e4291e3c61405bad3539d81e0b3cd62ae0d66ced018128af509c68f"},
diff --git a/pyproject.toml b/pyproject.toml
index 94617837137..13f6d723efc 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -49,7 +49,7 @@ radon = "^4.5.0"
xenon = "^0.7.3"
flake8-eradicate = "^1.0.0"
flake8-bugbear = "^21.3.2"
-mkdocs-material = "^7.1.6"
+mkdocs-material = "^7.1.7"
mkdocs-git-revision-date-plugin = "^0.3.1"
mike = "^0.6.0"
From b029b5c3ffdbcb8080f4d9313ff36150412c088e Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Mon, 7 Jun 2021 08:18:41 +0000
Subject: [PATCH 4/6] chore(deps): bump boto3 from 1.17.87 to 1.17.88 (#463)
Bumps [boto3](https://github.com/boto/boto3) from 1.17.87 to 1.17.88.
Changelog
Sourced from boto3's changelog.
1.17.88
- api-change:
rds
: [botocore
] Documentation updates for RDS: fixing an outdated link to the RDS documentation in DBInstance$DBInstanceStatus
- api-change:
pi
: [botocore
] The new GetDimensionKeyDetails action retrieves the attributes of the specified dimension group for a DB instance or data source.
- api-change:
cloudtrail
: [botocore
] AWS CloudTrail supports data events on new service resources, including Amazon DynamoDB tables and S3 Object Lambda access points.
- api-change:
medialive
: [botocore
] Add support for automatically setting the H.264 adaptive quantization and GOP B-frame fields.
- api-change:
autoscaling
: [botocore
] Documentation updates for Amazon EC2 Auto Scaling
- api-change:
qldb
: [botocore
] Documentation updates for Amazon QLDB
Commits
[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=boto3&package-manager=pip&previous-version=1.17.87&new-version=1.17.88)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)
Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`.
[//]: # (dependabot-automerge-start)
[//]: # (dependabot-automerge-end)
---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR:
- `@dependabot rebase` will rebase this PR
- `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it
- `@dependabot merge` will merge this PR after your CI passes on it
- `@dependabot squash and merge` will squash and merge this PR after your CI passes on it
- `@dependabot cancel merge` will cancel a previously requested merge and block automerging
- `@dependabot reopen` will reopen this PR if it is closed
- `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually
- `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself)
- `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself)
- `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
---
poetry.lock | 14 +++++++-------
1 file changed, 7 insertions(+), 7 deletions(-)
diff --git a/poetry.lock b/poetry.lock
index 7d7537a0dc7..cb9bed06939 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -81,20 +81,20 @@ d = ["aiohttp (>=3.3.2)", "aiohttp-cors"]
[[package]]
name = "boto3"
-version = "1.17.87"
+version = "1.17.88"
description = "The AWS SDK for Python"
category = "main"
optional = false
python-versions = ">= 2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
[package.dependencies]
-botocore = ">=1.20.87,<1.21.0"
+botocore = ">=1.20.88,<1.21.0"
jmespath = ">=0.7.1,<1.0.0"
s3transfer = ">=0.4.0,<0.5.0"
[[package]]
name = "botocore"
-version = "1.20.87"
+version = "1.20.88"
description = "Low-level, data-driven core of boto 3."
category = "main"
optional = false
@@ -1092,12 +1092,12 @@ black = [
{file = "black-20.8b1.tar.gz", hash = "sha256:1c02557aa099101b9d21496f8a914e9ed2222ef70336404eeeac8edba836fbea"},
]
boto3 = [
- {file = "boto3-1.17.87-py2.py3-none-any.whl", hash = "sha256:52025e0af7935cb7036037978de190e41ad7f6716d1de1e3669976e99d084edf"},
- {file = "boto3-1.17.87.tar.gz", hash = "sha256:612aa5dc27b87ae1dc695e194f97af7da0fcc9e97aa80d9740732d78ba117119"},
+ {file = "boto3-1.17.88-py2.py3-none-any.whl", hash = "sha256:13afcc5e2fcc5e4f9eab1ee46a769cf738a259dcd45f71ee79255f18973e4584"},
+ {file = "boto3-1.17.88.tar.gz", hash = "sha256:a715ca6c4457d56ea3e3efde9bdc8be41c29b2f2a904fbd12befdb9cb5e289e4"},
]
botocore = [
- {file = "botocore-1.20.87-py2.py3-none-any.whl", hash = "sha256:3dcc84855349073e0cb706e90a9e1180899deded4b8555698fb4a5a5b3357202"},
- {file = "botocore-1.20.87.tar.gz", hash = "sha256:04a5594ae1886233cb15ab636b51aeecf6b5412231f72744405f11a54a8cda58"},
+ {file = "botocore-1.20.88-py2.py3-none-any.whl", hash = "sha256:be3cb73fab60a2349e2932bd0cbbe7e7736e3a2cd8c05b539d362ff3e406be76"},
+ {file = "botocore-1.20.88.tar.gz", hash = "sha256:bc989edab52d4788aadd8d1aff925f5c6a7cbc68900bfdb8e379965aeac17317"},
]
certifi = [
{file = "certifi-2020.12.5-py2.py3-none-any.whl", hash = "sha256:719a74fb9e33b9bd44cc7f3a8d94bc35e4049deebe19ba7d8e108280cfd59830"},
From a65e55ce7d5468be61fbe043c6e6296387d2e862 Mon Sep 17 00:00:00 2001
From: Michael Brewer
Date: Mon, 7 Jun 2021 21:11:11 -0700
Subject: [PATCH 5/6] feat(data-classes): add AttributeValueType to
DynamoDBStreamEvent (#462)
---
.../data_classes/dynamo_db_stream_event.py | 77 +++++++++++++++-
tests/functional/test_data_classes.py | 90 +++++++++++++++++++
2 files changed, 165 insertions(+), 2 deletions(-)
diff --git a/aws_lambda_powertools/utilities/data_classes/dynamo_db_stream_event.py b/aws_lambda_powertools/utilities/data_classes/dynamo_db_stream_event.py
index bc3a4a82995..1ec3d6157bf 100644
--- a/aws_lambda_powertools/utilities/data_classes/dynamo_db_stream_event.py
+++ b/aws_lambda_powertools/utilities/data_classes/dynamo_db_stream_event.py
@@ -1,15 +1,42 @@
from enum import Enum
-from typing import Dict, Iterator, List, Optional
+from typing import Any, Dict, Iterator, List, Optional, Union
from aws_lambda_powertools.utilities.data_classes.common import DictWrapper
+class AttributeValueType(Enum):
+ Binary = "B"
+ BinarySet = "BS"
+ Boolean = "BOOL"
+ List = "L"
+ Map = "M"
+ Number = "N"
+ NumberSet = "NS"
+ Null = "NULL"
+ String = "S"
+ StringSet = "SS"
+
+
class AttributeValue(DictWrapper):
"""Represents the data for an attribute
- Documentation: https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_streams_AttributeValue.html
+ Documentation:
+ --------------
+ - https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_streams_AttributeValue.html
+ - https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/HowItWorks.NamingRulesDataTypes.html
"""
+ def __init__(self, data: Dict[str, Any]):
+ """AttributeValue constructor
+
+ Parameters
+ ----------
+ data: Dict[str, Any]
+ Raw lambda event dict
+ """
+ super().__init__(data)
+ self.dynamodb_type = list(data.keys())[0]
+
@property
def b_value(self) -> Optional[str]:
"""An attribute of type Base64-encoded binary data object
@@ -106,6 +133,29 @@ def ss_value(self) -> Optional[List[str]]:
"""
return self.get("SS")
+ @property
+ def get_type(self) -> AttributeValueType:
+ """Get the attribute value type based on the contained data"""
+ return AttributeValueType(self.dynamodb_type)
+
+ @property
+ def l_value(self) -> Optional[List["AttributeValue"]]:
+ """Alias of list_value"""
+ return self.list_value
+
+ @property
+ def m_value(self) -> Optional[Dict[str, "AttributeValue"]]:
+ """Alias of map_value"""
+ return self.map_value
+
+ @property
+ def get_value(self) -> Union[Optional[bool], Optional[str], Optional[List], Optional[Dict]]:
+ """Get the attribute value"""
+ try:
+ return getattr(self, f"{self.dynamodb_type.lower()}_value")
+ except AttributeError:
+ raise TypeError(f"Dynamodb type {self.dynamodb_type} is not supported")
+
def _attribute_value_dict(attr_values: Dict[str, dict], key: str) -> Optional[Dict[str, AttributeValue]]:
"""A dict of type String to AttributeValue object map
@@ -224,6 +274,29 @@ class DynamoDBStreamEvent(DictWrapper):
Documentation:
-------------
- https://docs.aws.amazon.com/lambda/latest/dg/with-ddb.html
+
+ Example
+ -------
+ **Process dynamodb stream events and use get_type and get_value for handling conversions**
+
+ from aws_lambda_powertools.utilities.data_classes import event_source, DynamoDBStreamEvent
+ from aws_lambda_powertools.utilities.data_classes.dynamo_db_stream_event import (
+ AttributeValueType,
+ AttributeValue,
+ )
+ from aws_lambda_powertools.utilities.typing import LambdaContext
+
+
+ @event_source(data_class=DynamoDBStreamEvent)
+ def lambda_handler(event: DynamoDBStreamEvent, context: LambdaContext):
+ for record in event.records:
+ key: AttributeValue = record.dynamodb.keys["id"]
+            if key.get_type == AttributeValueType.Number:
+ assert key.get_value == key.n_value
+ print(key.get_value)
+            elif key.get_type == AttributeValueType.Map:
+ assert key.get_value == key.map_value
+ print(key.get_value)
"""
@property
diff --git a/tests/functional/test_data_classes.py b/tests/functional/test_data_classes.py
index 60dfc591897..8b412860694 100644
--- a/tests/functional/test_data_classes.py
+++ b/tests/functional/test_data_classes.py
@@ -58,6 +58,7 @@
)
from aws_lambda_powertools.utilities.data_classes.dynamo_db_stream_event import (
AttributeValue,
+ AttributeValueType,
DynamoDBRecordEventName,
DynamoDBStreamEvent,
StreamViewType,
@@ -443,6 +444,33 @@ def test_dynamo_db_stream_trigger_event():
assert record.user_identity is None
+def test_dynamo_attribute_value_b_value():
+ example_attribute_value = {"B": "dGhpcyB0ZXh0IGlzIGJhc2U2NC1lbmNvZGVk"}
+
+ attribute_value = AttributeValue(example_attribute_value)
+
+ assert attribute_value.get_type == AttributeValueType.Binary
+ assert attribute_value.b_value == attribute_value.get_value
+
+
+def test_dynamo_attribute_value_bs_value():
+ example_attribute_value = {"BS": ["U3Vubnk=", "UmFpbnk=", "U25vd3k="]}
+
+ attribute_value = AttributeValue(example_attribute_value)
+
+ assert attribute_value.get_type == AttributeValueType.BinarySet
+ assert attribute_value.bs_value == attribute_value.get_value
+
+
+def test_dynamo_attribute_value_bool_value():
+ example_attribute_value = {"BOOL": True}
+
+ attribute_value = AttributeValue(example_attribute_value)
+
+ assert attribute_value.get_type == AttributeValueType.Boolean
+ assert attribute_value.bool_value == attribute_value.get_value
+
+
def test_dynamo_attribute_value_list_value():
example_attribute_value = {"L": [{"S": "Cookies"}, {"S": "Coffee"}, {"N": "3.14159"}]}
attribute_value = AttributeValue(example_attribute_value)
@@ -450,6 +478,9 @@ def test_dynamo_attribute_value_list_value():
assert list_value is not None
item = list_value[0]
assert item.s_value == "Cookies"
+ assert attribute_value.get_type == AttributeValueType.List
+ assert attribute_value.l_value == attribute_value.list_value
+ assert attribute_value.list_value == attribute_value.get_value
def test_dynamo_attribute_value_map_value():
@@ -461,6 +492,65 @@ def test_dynamo_attribute_value_map_value():
assert map_value is not None
item = map_value["Name"]
assert item.s_value == "Joe"
+ assert attribute_value.get_type == AttributeValueType.Map
+ assert attribute_value.m_value == attribute_value.map_value
+ assert attribute_value.map_value == attribute_value.get_value
+
+
+def test_dynamo_attribute_value_n_value():
+ example_attribute_value = {"N": "123.45"}
+
+ attribute_value = AttributeValue(example_attribute_value)
+
+ assert attribute_value.get_type == AttributeValueType.Number
+ assert attribute_value.n_value == attribute_value.get_value
+
+
+def test_dynamo_attribute_value_ns_value():
+ example_attribute_value = {"NS": ["42.2", "-19", "7.5", "3.14"]}
+
+ attribute_value = AttributeValue(example_attribute_value)
+
+ assert attribute_value.get_type == AttributeValueType.NumberSet
+ assert attribute_value.ns_value == attribute_value.get_value
+
+
+def test_dynamo_attribute_value_null_value():
+ example_attribute_value = {"NULL": True}
+
+ attribute_value = AttributeValue(example_attribute_value)
+
+ assert attribute_value.get_type == AttributeValueType.Null
+ assert attribute_value.null_value == attribute_value.get_value
+
+
+def test_dynamo_attribute_value_s_value():
+ example_attribute_value = {"S": "Hello"}
+
+ attribute_value = AttributeValue(example_attribute_value)
+
+ assert attribute_value.get_type == AttributeValueType.String
+ assert attribute_value.s_value == attribute_value.get_value
+
+
+def test_dynamo_attribute_value_ss_value():
+ example_attribute_value = {"SS": ["Giraffe", "Hippo", "Zebra"]}
+
+ attribute_value = AttributeValue(example_attribute_value)
+
+ assert attribute_value.get_type == AttributeValueType.StringSet
+ assert attribute_value.ss_value == attribute_value.get_value
+
+
+def test_dynamo_attribute_value_type_error():
+ example_attribute_value = {"UNSUPPORTED": "'value' should raise a type error"}
+
+ attribute_value = AttributeValue(example_attribute_value)
+
+ with pytest.raises(TypeError):
+ print(attribute_value.get_value)
+ with pytest.raises(ValueError):
+ print(attribute_value.get_type)
def test_event_bridge_event():
From 070428aac582148d99e188f0bdf87ef2e76c45d5 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Tue, 8 Jun 2021 07:08:56 +0000
Subject: [PATCH 6/6] chore(deps): bump boto3 from 1.17.88 to 1.17.89 (#466)
Bumps [boto3](https://github.com/boto/boto3) from 1.17.88 to 1.17.89.
Changelog
Sourced from boto3's changelog.
1.17.89
- api-change:
sagemaker
: [botocore
] AWS SageMaker - Releasing new APIs related to Callback steps in model building pipelines. Adds experiment integration to model building pipelines.
- api-change:
glue
: [botocore
] Add SampleSize variable to S3Target to enable s3-sampling feature through API.
- api-change:
personalize
: [botocore
] Update regex validation in kmsKeyArn and s3 path API parameters for AWS Personalize APIs
- api-change:
eks
: [botocore
] Added updateConfig option that allows customers to control upgrade velocity in Managed Node Group.
Commits
[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=boto3&package-manager=pip&previous-version=1.17.88&new-version=1.17.89)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)
Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`.
[//]: # (dependabot-automerge-start)
[//]: # (dependabot-automerge-end)
---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR:
- `@dependabot rebase` will rebase this PR
- `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it
- `@dependabot merge` will merge this PR after your CI passes on it
- `@dependabot squash and merge` will squash and merge this PR after your CI passes on it
- `@dependabot cancel merge` will cancel a previously requested merge and block automerging
- `@dependabot reopen` will reopen this PR if it is closed
- `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually
- `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself)
- `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself)
- `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
---
poetry.lock | 14 +++++++-------
1 file changed, 7 insertions(+), 7 deletions(-)
diff --git a/poetry.lock b/poetry.lock
index cb9bed06939..ca9ac489f80 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -81,20 +81,20 @@ d = ["aiohttp (>=3.3.2)", "aiohttp-cors"]
[[package]]
name = "boto3"
-version = "1.17.88"
+version = "1.17.89"
description = "The AWS SDK for Python"
category = "main"
optional = false
python-versions = ">= 2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
[package.dependencies]
-botocore = ">=1.20.88,<1.21.0"
+botocore = ">=1.20.89,<1.21.0"
jmespath = ">=0.7.1,<1.0.0"
s3transfer = ">=0.4.0,<0.5.0"
[[package]]
name = "botocore"
-version = "1.20.88"
+version = "1.20.89"
description = "Low-level, data-driven core of boto 3."
category = "main"
optional = false
@@ -1092,12 +1092,12 @@ black = [
{file = "black-20.8b1.tar.gz", hash = "sha256:1c02557aa099101b9d21496f8a914e9ed2222ef70336404eeeac8edba836fbea"},
]
boto3 = [
- {file = "boto3-1.17.88-py2.py3-none-any.whl", hash = "sha256:13afcc5e2fcc5e4f9eab1ee46a769cf738a259dcd45f71ee79255f18973e4584"},
- {file = "boto3-1.17.88.tar.gz", hash = "sha256:a715ca6c4457d56ea3e3efde9bdc8be41c29b2f2a904fbd12befdb9cb5e289e4"},
+ {file = "boto3-1.17.89-py2.py3-none-any.whl", hash = "sha256:1f02cd513b130f9cd86c99836de6a0a5f78ea55110bdbc9011d9d78ff0fd3204"},
+ {file = "boto3-1.17.89.tar.gz", hash = "sha256:06d8dca85a0bb66b7bf2721745895d44691c78dbe7eb3b146702aff85e34af34"},
]
botocore = [
- {file = "botocore-1.20.88-py2.py3-none-any.whl", hash = "sha256:be3cb73fab60a2349e2932bd0cbbe7e7736e3a2cd8c05b539d362ff3e406be76"},
- {file = "botocore-1.20.88.tar.gz", hash = "sha256:bc989edab52d4788aadd8d1aff925f5c6a7cbc68900bfdb8e379965aeac17317"},
+ {file = "botocore-1.20.89-py2.py3-none-any.whl", hash = "sha256:e112f9a45db1c5a42f787e4b228a35da6e823bcba70f43f43005b4fb58066446"},
+ {file = "botocore-1.20.89.tar.gz", hash = "sha256:ce0fa8bc260ad187824052805d224cee239d953bb4bfb1e52cf35ad79481b316"},
]
certifi = [
{file = "certifi-2020.12.5-py2.py3-none-any.whl", hash = "sha256:719a74fb9e33b9bd44cc7f3a8d94bc35e4049deebe19ba7d8e108280cfd59830"},