
feat(parser): support for S3 Event Notifications via EventBridge #1982

Merged
11 changes: 10 additions & 1 deletion aws_lambda_powertools/utilities/parser/models/__init__.py
@@ -43,7 +43,13 @@
KinesisFirehoseRecordMetadata,
)
from .lambda_function_url import LambdaFunctionUrlModel
from .s3 import S3Model, S3RecordModel
from .s3 import (
S3EventNotificationEventBridgeDetailModel,
S3EventNotificationEventBridgeModel,
S3EventNotificationObjectModel,
S3Model,
S3RecordModel,
)
from .s3_object_event import (
S3ObjectConfiguration,
S3ObjectContext,
@@ -105,6 +111,9 @@
"S3ObjectUserRequest",
"S3ObjectConfiguration",
"S3ObjectContext",
"S3EventNotificationObjectModel",
"S3EventNotificationEventBridgeModel",
"S3EventNotificationEventBridgeDetailModel",
"SesModel",
"SesRecordModel",
"SesMessage",
aws_lambda_powertools/utilities/parser/models/event_bridge.py
@@ -1,8 +1,10 @@
from datetime import datetime
from typing import Any, Dict, List, Optional, Type, Union
from typing import List, Optional

from pydantic import BaseModel, Field

from aws_lambda_powertools.utilities.parser.types import RawDictOrModel


class EventBridgeModel(BaseModel):
version: str
@@ -13,5 +15,5 @@ class EventBridgeModel(BaseModel):
region: str
resources: List[str]
detail_type: str = Field(None, alias="detail-type")
detail: Union[Dict[str, Any], Type[BaseModel]]
detail: RawDictOrModel
replay_name: Optional[str] = Field(None, alias="replay-name")
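
Why `detail` becomes `RawDictOrModel`: the base `EventBridgeModel` keeps accepting an arbitrary dict, while event-specific subclasses (such as the new S3 model in the next file) can override `detail` with a typed model. A minimal sketch of that pattern using the existing `parse` helper; `OrderDetail`, `OrderEventModel` and the sample payload are illustrative only, not part of this PR:

```python
from pydantic import BaseModel

from aws_lambda_powertools.utilities.parser import parse
from aws_lambda_powertools.utilities.parser.models import EventBridgeModel


class OrderDetail(BaseModel):
    # Hypothetical detail shape used only for this sketch
    order_id: str


class OrderEventModel(EventBridgeModel):
    # Subclass narrows the base `detail: RawDictOrModel` to a concrete model
    detail: OrderDetail


sample_event = {
    "version": "0",
    "id": "f5f1e65c-dc3a-93ca-6c1e-b1647eac7963",
    "detail-type": "Order Placed",
    "source": "my.custom.app",
    "account": "123456789012",
    "time": "2023-03-08T17:50:14Z",
    "region": "eu-west-1",
    "resources": [],
    "detail": {"order_id": "o-123"},
}

parsed = parse(event=sample_event, model=OrderEventModel)
print(parsed.detail.order_id)  # detail is now an OrderDetail instance, not a plain dict
```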
33 changes: 33 additions & 0 deletions aws_lambda_powertools/utilities/parser/models/s3.py
@@ -8,6 +8,8 @@

from aws_lambda_powertools.utilities.parser.types import Literal

from .event_bridge import EventBridgeModel


class S3EventRecordGlacierRestoreEventData(BaseModel):
lifecycleRestorationExpiryTime: datetime
@@ -56,6 +58,37 @@ class S3Message(BaseModel):
object: S3Object # noqa: A003,VNE003


class S3EventNotificationObjectModel(BaseModel):
key: str
size: Optional[NonNegativeFloat]
etag: str
version_id: str = Field(None, alias="version-id")
sequencer: Optional[str]


class S3EventNotificationEventBridgeBucketModel(BaseModel):
name: str


class S3EventNotificationEventBridgeDetailModel(BaseModel):
version: str
bucket: S3EventNotificationEventBridgeBucketModel
object: S3EventNotificationObjectModel # noqa: A003,VNE003
request_id: str = Field(None, alias="request-id")
requester: str
source_ip_address: str = Field(None, alias="source-ip-address")
reason: Optional[str]
deletion_type: Optional[str] = Field(None, alias="deletion-type")
restore_expiry_time: Optional[str] = Field(None, alias="restore-expiry-time")
source_storage_class: Optional[str] = Field(None, alias="source-storage-class")
destination_storage_class: Optional[str] = Field(None, alias="destination-storage-class")
destination_access_tier: Optional[str] = Field(None, alias="destination-access-tier")


class S3EventNotificationEventBridgeModel(EventBridgeModel):
detail: S3EventNotificationEventBridgeDetailModel


class S3RecordModel(BaseModel):
eventVersion: str
eventSource: Literal["aws:s3"]
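
For context, a quick sketch of how the new model is meant to be used. The payload is an inline copy of the `s3EventBridgeNotificationObjectCreatedEvent.json` fixture added further down, and the direct-instantiation style mirrors the new unit tests (an `event_parser` example follows the docs change below):

```python
from aws_lambda_powertools.utilities.parser.models import (
    S3EventNotificationEventBridgeModel,
)

# Inline copy of the Object Created fixture added in this PR
raw_event = {
    "version": "0",
    "id": "f5f1e65c-dc3a-93ca-6c1e-b1647eac7963",
    "detail-type": "Object Created",
    "source": "aws.s3",
    "account": "123456789012",
    "time": "2023-03-08T17:50:14Z",
    "region": "eu-west-1",
    "resources": ["arn:aws:s3:::example-bucket"],
    "detail": {
        "version": "0",
        "bucket": {"name": "example-bucket"},
        "object": {
            "key": "IMG_m7fzo3.jpg",
            "size": 184662,
            "etag": "4e68adba0abe2dc8653dc3354e14c01d",
            "sequencer": "006408CAD69598B05E",
        },
        "request-id": "57H08PA84AB1JZW0",
        "requester": "123456789012",
        "source-ip-address": "34.252.34.74",
        "reason": "PutObject",
    },
}

model = S3EventNotificationEventBridgeModel(**raw_event)
print(model.detail.bucket.name)  # "example-bucket"
print(model.detail.object.key)   # "IMG_m7fzo3.jpg"
print(model.detail.request_id)   # hyphenated keys are exposed via snake_case aliases
```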
4 changes: 3 additions & 1 deletion aws_lambda_powertools/utilities/parser/types.py
@@ -1,7 +1,7 @@
"""Generics and other shared types used across parser"""

import sys
from typing import TypeVar
from typing import Any, Dict, Type, TypeVar, Union

from pydantic import BaseModel

@@ -14,3 +14,5 @@
Model = TypeVar("Model", bound=BaseModel)
EnvelopeModel = TypeVar("EnvelopeModel")
EventParserReturnType = TypeVar("EventParserReturnType")
AnyInheritedModel = Union[Type[BaseModel], BaseModel]
RawDictOrModel = Union[Dict[str, Any], AnyInheritedModel]
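
A self-contained illustration of what the two new aliases admit: `RawDictOrModel` lets a field hold either a plain dict or anything deriving from pydantic's `BaseModel` (class or instance). The aliases are reproduced so the snippet runs on its own; `Wrapper` and `Inner` are made-up names:

```python
from typing import Any, Dict, Type, Union

from pydantic import BaseModel

# Same definitions as in types.py above, copied so this sketch is self-contained
AnyInheritedModel = Union[Type[BaseModel], BaseModel]
RawDictOrModel = Union[Dict[str, Any], AnyInheritedModel]


class Inner(BaseModel):
    name: str


class Wrapper(BaseModel):
    detail: RawDictOrModel


print(Wrapper(detail={"name": "raw"}).detail)      # a plain dict is accepted unchanged
print(Wrapper(detail=Inner(name="typed")).detail)  # a model instance is also accepted
                                                   # (pydantic may coerce it to a dict under Union rules)
```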
37 changes: 19 additions & 18 deletions docs/utilities/parser.md
@@ -156,24 +156,25 @@ def my_function():

Parser comes with the following built-in models:

| Model name | Description |
| ------------------------------- | ------------------------------------------------------------------ |
| **DynamoDBStreamModel** | Lambda Event Source payload for Amazon DynamoDB Streams |
| **EventBridgeModel** | Lambda Event Source payload for Amazon EventBridge |
| **SqsModel** | Lambda Event Source payload for Amazon SQS |
| **AlbModel** | Lambda Event Source payload for Amazon Application Load Balancer |
| **CloudwatchLogsModel** | Lambda Event Source payload for Amazon CloudWatch Logs |
| **S3Model** | Lambda Event Source payload for Amazon S3 |
| **S3ObjectLambdaEvent** | Lambda Event Source payload for Amazon S3 Object Lambda |
| **KinesisDataStreamModel** | Lambda Event Source payload for Amazon Kinesis Data Streams |
| **KinesisFirehoseModel** | Lambda Event Source payload for Amazon Kinesis Firehose |
| **SesModel** | Lambda Event Source payload for Amazon Simple Email Service |
| **SnsModel** | Lambda Event Source payload for Amazon Simple Notification Service |
| **APIGatewayProxyEventModel** | Lambda Event Source payload for Amazon API Gateway |
| **APIGatewayProxyEventV2Model** | Lambda Event Source payload for Amazon API Gateway v2 payload |
| **LambdaFunctionUrlModel** | Lambda Event Source payload for Lambda Function URL payload |
| **KafkaSelfManagedEventModel** | Lambda Event Source payload for self managed Kafka payload |
| **KafkaMskEventModel** | Lambda Event Source payload for AWS MSK payload |
| Model name | Description |
| --------------------------------------- | ---------------------------------------------------------------------------- |
| **DynamoDBStreamModel** | Lambda Event Source payload for Amazon DynamoDB Streams |
| **EventBridgeModel** | Lambda Event Source payload for Amazon EventBridge |
| **SqsModel** | Lambda Event Source payload for Amazon SQS |
| **AlbModel** | Lambda Event Source payload for Amazon Application Load Balancer |
| **CloudwatchLogsModel** | Lambda Event Source payload for Amazon CloudWatch Logs |
| **S3Model** | Lambda Event Source payload for Amazon S3 |
| **S3ObjectLambdaEvent** | Lambda Event Source payload for Amazon S3 Object Lambda |
| **S3EventNotificationEventBridgeModel** | Lambda Event Source payload for Amazon S3 Event Notification to EventBridge. |
| **KinesisDataStreamModel** | Lambda Event Source payload for Amazon Kinesis Data Streams |
| **KinesisFirehoseModel** | Lambda Event Source payload for Amazon Kinesis Firehose |
| **SesModel** | Lambda Event Source payload for Amazon Simple Email Service |
| **SnsModel** | Lambda Event Source payload for Amazon Simple Notification Service |
| **APIGatewayProxyEventModel** | Lambda Event Source payload for Amazon API Gateway |
| **APIGatewayProxyEventV2Model** | Lambda Event Source payload for Amazon API Gateway v2 payload |
| **LambdaFunctionUrlModel** | Lambda Event Source payload for Lambda Function URL payload |
| **KafkaSelfManagedEventModel** | Lambda Event Source payload for self managed Kafka payload |
| **KafkaMskEventModel** | Lambda Event Source payload for AWS MSK payload |

#### Extending built-in models

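
To make the new table row concrete, a handler sketch in the style of the parser docs' other examples; it is not part of this PR's doc changes, and the returned bucket/key handling is purely illustrative:

```python
from aws_lambda_powertools.utilities.parser import event_parser
from aws_lambda_powertools.utilities.parser.models import (
    S3EventNotificationEventBridgeModel,
)
from aws_lambda_powertools.utilities.typing import LambdaContext


@event_parser(model=S3EventNotificationEventBridgeModel)
def lambda_handler(event: S3EventNotificationEventBridgeModel, context: LambdaContext):
    # `detail` is fully typed thanks to S3EventNotificationEventBridgeDetailModel
    return {
        "bucket": event.detail.bucket.name,
        "key": event.detail.object.key,
        "reason": event.detail.reason,
    }
```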
28 changes: 28 additions & 0 deletions tests/events/s3EventBridgeNotificationObjectCreatedEvent.json
@@ -0,0 +1,28 @@
{
"version": "0",
"id": "f5f1e65c-dc3a-93ca-6c1e-b1647eac7963",
"detail-type": "Object Created",
"source": "aws.s3",
"account": "123456789012",
"time": "2023-03-08T17:50:14Z",
"region": "eu-west-1",
"resources": [
"arn:aws:s3:::example-bucket"
],
"detail": {
"version": "0",
"bucket": {
"name": "example-bucket"
},
"object": {
"key": "IMG_m7fzo3.jpg",
"size": 184662,
"etag": "4e68adba0abe2dc8653dc3354e14c01d",
"sequencer": "006408CAD69598B05E"
},
"request-id": "57H08PA84AB1JZW0",
"requester": "123456789012",
"source-ip-address": "34.252.34.74",
"reason": "PutObject"
}
}
29 changes: 29 additions & 0 deletions tests/events/s3EventBridgeNotificationObjectDeletedEvent.json
@@ -0,0 +1,29 @@
{
"version": "0",
"id": "2ee9cc15-d022-99ea-1fb8-1b1bac4850f9",
"detail-type": "Object Deleted",
"source": "aws.s3",
"account": "111122223333",
"time": "2021-11-12T00:00:00Z",
"region": "ca-central-1",
"resources": [
"arn:aws:s3:::example-bucket"
],
"detail": {
"version": "0",
"bucket": {
"name": "example-bucket"
},
"object": {
"key": "IMG_m7fzo3.jpg",
"size": 184662,
"etag": "4e68adba0abe2dc8653dc3354e14c01d",
"sequencer": "006408CAD69598B05E"
},
"request-id": "0BH729840619AG5K",
"requester": "123456789012",
"source-ip-address": "34.252.34.74",
"reason": "DeleteObject",
"deletion-type": "Delete Marker Created"
}
}
28 changes: 28 additions & 0 deletions tests/events/s3EventBridgeNotificationObjectExpiredEvent.json
@@ -0,0 +1,28 @@
{
"version": "0",
"id": "ad1de317-e409-eba2-9552-30113f8d88e3",
"detail-type": "Object Deleted",
"source": "aws.s3",
"account": "111122223333",
"time": "2021-11-12T00:00:00Z",
"region": "ca-central-1",
"resources": [
"arn:aws:s3:::example-bucket"
],
"detail": {
"version": "0",
"bucket": {
"name": "example-bucket"
},
"object": {
"key": "IMG_m7fzo3.jpg",
"size": 184662,
"etag": "4e68adba0abe2dc8653dc3354e14c01d",
"sequencer": "006408CAD69598B05E"
},
"request-id": "20EB74C14654DC47",
"requester": "s3.amazonaws.com",
"reason": "Lifecycle Expiration",
"deletion-type": "Delete Marker Created"
}
}
28 changes: 28 additions & 0 deletions tests/events/s3EventBridgeNotificationObjectRestoreCompletedEvent.json
@@ -0,0 +1,28 @@
{
"version": "0",
"id": "6924de0d-13e2-6bbf-c0c1-b903b753565e",
"detail-type": "Object Restore Completed",
"source": "aws.s3",
"account": "111122223333",
"time": "2021-11-12T00:00:00Z",
"region": "ca-central-1",
"resources": [
"arn:aws:s3:::example-bucket"
],
"detail": {
"version": "0",
"bucket": {
"name": "example-bucket"
},
"object": {
"key": "IMG_m7fzo3.jpg",
"size": 184662,
"etag": "4e68adba0abe2dc8653dc3354e14c01d",
"sequencer": "006408CAD69598B05E"
},
"request-id": "189F19CB7FB1B6A4",
"requester": "s3.amazonaws.com",
"restore-expiry-time": "2021-11-13T00:00:00Z",
"source-storage-class": "GLACIER"
}
}
Empty file added tests/unit/parser/__init__.py
107 changes: 107 additions & 0 deletions tests/unit/parser/test_s3.py
@@ -0,0 +1,107 @@
from datetime import datetime

from aws_lambda_powertools.utilities.parser.models import (
S3EventNotificationEventBridgeModel,
)
from tests.functional.utils import load_event


def test_s3_eventbridge_notification_object_created_event():
raw_event = load_event("s3EventBridgeNotificationObjectCreatedEvent.json")
model = S3EventNotificationEventBridgeModel(**raw_event)

assert model.version == raw_event["version"]
assert model.id == raw_event["id"]
assert model.detail_type == raw_event["detail-type"]
assert model.source == raw_event["source"]
assert model.account == raw_event["account"]
assert model.time == datetime.fromisoformat(raw_event["time"].replace("Z", "+00:00"))
assert model.region == raw_event["region"]
assert model.resources == raw_event["resources"]

assert model.detail.version == raw_event["detail"]["version"]
assert model.detail.bucket.name == raw_event["detail"]["bucket"]["name"]
assert model.detail.object.key == raw_event["detail"]["object"]["key"]
assert model.detail.object.size == raw_event["detail"]["object"]["size"]
assert model.detail.object.etag == raw_event["detail"]["object"]["etag"]
assert model.detail.object.sequencer == raw_event["detail"]["object"]["sequencer"]
assert model.detail.request_id == raw_event["detail"]["request-id"]
assert model.detail.requester == raw_event["detail"]["requester"]
assert model.detail.source_ip_address == raw_event["detail"]["source-ip-address"]
assert model.detail.reason == raw_event["detail"]["reason"]


def test_s3_eventbridge_notification_object_deleted_event():
raw_event = load_event("s3EventBridgeNotificationObjectDeletedEvent.json")
model = S3EventNotificationEventBridgeModel(**raw_event)

assert model.version == raw_event["version"]
assert model.id == raw_event["id"]
assert model.detail_type == raw_event["detail-type"]
assert model.source == raw_event["source"]
assert model.account == raw_event["account"]
assert model.time == datetime.fromisoformat(raw_event["time"].replace("Z", "+00:00"))
assert model.region == raw_event["region"]
assert model.resources == raw_event["resources"]

assert model.detail.version == raw_event["detail"]["version"]
assert model.detail.bucket.name == raw_event["detail"]["bucket"]["name"]
assert model.detail.object.key == raw_event["detail"]["object"]["key"]
assert model.detail.object.size == raw_event["detail"]["object"]["size"]
assert model.detail.object.etag == raw_event["detail"]["object"]["etag"]
assert model.detail.object.sequencer == raw_event["detail"]["object"]["sequencer"]
assert model.detail.request_id == raw_event["detail"]["request-id"]
assert model.detail.requester == raw_event["detail"]["requester"]
assert model.detail.source_ip_address == raw_event["detail"]["source-ip-address"]
assert model.detail.reason == raw_event["detail"]["reason"]
assert model.detail.deletion_type == raw_event["detail"]["deletion-type"]


def test_s3_eventbridge_notification_object_expired_event():
raw_event = load_event("s3EventBridgeNotificationObjectExpiredEvent.json")
model = S3EventNotificationEventBridgeModel(**raw_event)

assert model.version == raw_event["version"]
assert model.id == raw_event["id"]
assert model.detail_type == raw_event["detail-type"]
assert model.source == raw_event["source"]
assert model.account == raw_event["account"]
assert model.time == datetime.fromisoformat(raw_event["time"].replace("Z", "+00:00"))
assert model.region == raw_event["region"]
assert model.resources == raw_event["resources"]

assert model.detail.version == raw_event["detail"]["version"]
assert model.detail.bucket.name == raw_event["detail"]["bucket"]["name"]
assert model.detail.object.key == raw_event["detail"]["object"]["key"]
assert model.detail.object.size == raw_event["detail"]["object"]["size"]
assert model.detail.object.etag == raw_event["detail"]["object"]["etag"]
assert model.detail.object.sequencer == raw_event["detail"]["object"]["sequencer"]
assert model.detail.request_id == raw_event["detail"]["request-id"]
assert model.detail.requester == raw_event["detail"]["requester"]
assert model.detail.reason == raw_event["detail"]["reason"]
assert model.detail.deletion_type == raw_event["detail"]["deletion-type"]


def test_s3_eventbridge_notification_object_restore_completed_event():
raw_event = load_event("s3EventBridgeNotificationObjectRestoreCompletedEvent.json")
model = S3EventNotificationEventBridgeModel(**raw_event)

assert model.version == raw_event["version"]
assert model.id == raw_event["id"]
assert model.detail_type == raw_event["detail-type"]
assert model.source == raw_event["source"]
assert model.account == raw_event["account"]
assert model.time == datetime.fromisoformat(raw_event["time"].replace("Z", "+00:00"))
assert model.region == raw_event["region"]
assert model.resources == raw_event["resources"]

assert model.detail.version == raw_event["detail"]["version"]
assert model.detail.bucket.name == raw_event["detail"]["bucket"]["name"]
assert model.detail.object.key == raw_event["detail"]["object"]["key"]
assert model.detail.object.size == raw_event["detail"]["object"]["size"]
assert model.detail.object.etag == raw_event["detail"]["object"]["etag"]
assert model.detail.object.sequencer == raw_event["detail"]["object"]["sequencer"]
assert model.detail.request_id == raw_event["detail"]["request-id"]
assert model.detail.requester == raw_event["detail"]["requester"]
assert model.detail.restore_expiry_time == raw_event["detail"]["restore-expiry-time"]
assert model.detail.source_storage_class == raw_event["detail"]["source-storage-class"]
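
The four tests above deliberately spell out every field per fixture. If the suite ever wants to trim the repetition, a parametrized variant along these lines would cover the fields all four fixtures share (a sketch only, not something this PR adopts):

```python
import pytest

from aws_lambda_powertools.utilities.parser.models import (
    S3EventNotificationEventBridgeModel,
)
from tests.functional.utils import load_event


@pytest.mark.parametrize(
    "filename",
    [
        "s3EventBridgeNotificationObjectCreatedEvent.json",
        "s3EventBridgeNotificationObjectDeletedEvent.json",
        "s3EventBridgeNotificationObjectExpiredEvent.json",
        "s3EventBridgeNotificationObjectRestoreCompletedEvent.json",
    ],
)
def test_s3_eventbridge_notification_common_fields(filename):
    raw_event = load_event(filename)
    model = S3EventNotificationEventBridgeModel(**raw_event)

    # Assertions limited to fields present in every fixture
    assert model.id == raw_event["id"]
    assert model.detail_type == raw_event["detail-type"]
    assert model.detail.bucket.name == raw_event["detail"]["bucket"]["name"]
    assert model.detail.object.key == raw_event["detail"]["object"]["key"]
    assert model.detail.object.etag == raw_event["detail"]["object"]["etag"]
    assert model.detail.requester == raw_event["detail"]["requester"]
```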